Mirror of https://github.com/unraid/api.git (synced 2026-01-01 22:20:05 -06:00)
feat: nestjs initial query implementation (#748)
* feat: nestjs initial query implementation
* feat: more permissions and resolver cleanup
* fix: back to ubuntu to remain compatible with pkg docker building
* feat: listen on socket as well as ports
* feat: swap to bookworm instead of ubuntu
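For orientation, a minimal sketch of the kind of NestJS GraphQL query resolver this change moves toward. This is a hypothetical illustration based on the @nestjs/graphql and nest-access-control dependencies added below, not code taken from the commit:

import { Query, Resolver } from '@nestjs/graphql';

// Illustrative resolver only; the class and query names are assumptions.
@Resolver()
export class InfoResolver {
    // A simple read-only query; in the real API, access would be gated by
    // nest-access-control grants such as read:any on 'unraid-version'.
    @Query(() => String, { name: 'unraidVersion' })
    unraidVersion(): string {
        return '6.12.0';
    }
}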
api/.dockerignore (normal file, 1 line)
@@ -0,0 +1 @@
node_modules/
@@ -1,18 +1,19 @@
###########################################################
# Development/Build Image
###########################################################
FROM node:18.17.1-alpine As development
FROM node:18.17.1-bookworm-slim As development

# Install build tools and dependencies
RUN apk add --no-cache \
RUN apt-get update -y && apt-get install -y \
bash \
# Real PS Command (needed for some dependencies)
procps \
alpine-sdk \
python3 \
libvirt-dev \
jq \
zstd
zstd \
git \
build-essential

RUN mkdir /var/log/unraid-api/

@@ -33,7 +34,7 @@ COPY package.json package-lock.json ./
RUN npm i -g pkg zx

# Install deps
RUN npm ci
RUN npm i

EXPOSE 4000
@@ -1,5 +1,5 @@
[api]
version="3.2.3+da53f636"
version="3.2.3+30d451e0"
extraOrigins=""
[local]
[notifier]
@@ -16,5 +16,6 @@ regWizTime="1611175408732_0951-1653-3509-FBA155FA23C0"
idtoken=""
accesstoken=""
refreshtoken=""
dynamicRemoteAccessType="DISABLED"
[upc]
apikey="unupc_fab6ff6ffe51040595c6d9ffb63a353ba16cc2ad7d93f813a2e80a5810"
@@ -1,5 +1,5 @@
[api]
version="3.2.3+da53f636"
version="3.2.3+30d451e0"
extraOrigins=""
[local]
[notifier]
@@ -17,7 +17,8 @@ idtoken=""
accesstoken=""
refreshtoken=""
allowedOrigins="/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, http://localhost:8080, https://localhost:4443, https://tower.local:4443, https://192.168.1.150:4443, https://tower:4443, https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443, https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443, https://10-252-0-1.hash.myunraid.net:4443, https://10-252-1-1.hash.myunraid.net:4443, https://10-253-3-1.hash.myunraid.net:4443, https://10-253-4-1.hash.myunraid.net:4443, https://10-253-5-1.hash.myunraid.net:4443, https://connect.myunraid.net, https://staging.connect.myunraid.net, https://dev-my.myunraid.net:4000, https://studio.apollographql.com"
dynamicRemoteAccessType="DISABLED"
[upc]
apikey="unupc_fab6ff6ffe51040595c6d9ffb63a353ba16cc2ad7d93f813a2e80a5810"
[connectionStatus]
minigraph="ERROR_RETRYING"
minigraph="PRE_INIT"
api/package-lock.json (generated, 2128 lines): file diff suppressed because it is too large.
@@ -41,14 +41,15 @@
"release": "standard-version",
"typesync": "typesync",
"install:unraid": "./scripts/install-in-unraid.sh",
"start:plugin": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty LOG_LEVEL=trace unraid-api start --debug",
"start:plugin": "INTROSPECTION=true LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty LOG_LEVEL=trace unraid-api start --debug",
"start:plugin-verbose": "LOG_CONTEXT=true LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty LOG_LEVEL=trace unraid-api start --debug",
"start:dev": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development tsup --config ./tsup.config.ts --watch --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs start --debug'",
"stop:dev": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development tsup --config ./tsup.config.ts --watch --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs stop --debug'",
"start:report": "LOG_MOTHERSHIP_MESSAGES=true LOG_TYPE=pretty NODE_ENV=development LOG_LEVEL=trace NODE_ENV=development LOG_CONTEXT=true tsup --config ./tsup.config.ts --watch --onSuccess 'DOTENV_CONFIG_PATH=./.env.development node -r dotenv/config dist/unraid-api.cjs report --debug'",
"start:docker": "docker compose run --rm builder-interactive",
"docker:dev": "docker-compose run --rm --service-ports dev",
"docker:test": "docker-compose run --rm builder npm run test"
"build:dev": "GIT_SHA=$(git rev-parse --short HEAD) IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '') docker-compose build dev",
"docker:dev": "GIT_SHA=$(git rev-parse --short HEAD) IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '') docker-compose run --rm --service-ports dev",
"docker:test": "GIT_SHA=$(git rev-parse --short HEAD) IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '') docker-compose run --rm builder npm run test"
},
"files": [
".env.staging",
@@ -59,11 +60,17 @@
"dependencies": {
"@apollo/client": "^3.7.12",
"@apollo/server": "^4.6.0",
"@as-integrations/fastify": "^2.1.1",
"@graphql-codegen/client-preset": "^4.0.0",
"@graphql-tools/load-files": "^6.6.1",
"@graphql-tools/merge": "^8.4.0",
"@graphql-tools/schema": "^9.0.17",
"@graphql-tools/utils": "^9.2.1",
"@nestjs/apollo": "^12.0.11",
"@nestjs/core": "^10.2.9",
"@nestjs/graphql": "^12.0.11",
"@nestjs/passport": "^10.0.2",
"@nestjs/platform-fastify": "^10.2.9",
"@reduxjs/toolkit": "^1.9.5",
"@reflet/cron": "^1.3.1",
"@runonflux/nat-upnp": "^1.0.2",
@@ -75,8 +82,9 @@
"bytes": "^3.1.2",
"cacheable-lookup": "^6.1.0",
"catch-exit": "^1.2.2",
"chalk": "^4.1.2",
"chokidar": "^3.5.3",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
"cli-table": "^0.3.11",
"command-exists": "^1.2.9",
"convert": "^4.10.0",
@@ -87,14 +95,14 @@
"dotenv": "^16.0.3",
"express": "^4.18.2",
"find-process": "^1.4.7",
"graphql": "^16.6.0",
"graphql": "^16.8.1",
"graphql-fields": "^2.0.3",
"graphql-scalars": "^1.21.3",
"graphql-subscriptions": "^2.0.0",
"graphql-tag": "^2.12.6",
"graphql-type-json": "^0.3.2",
"graphql-type-uuid": "^0.2.0",
"graphql-ws": "^5.12.1",
"graphql-ws": "^5.14.2",
"htpasswd-js": "^1.0.2",
"ini": "^4.1.0",
"ip": "^1.1.8",
@@ -103,17 +111,22 @@
"multi-ini": "^2.2.0",
"mustache": "^4.2.0",
"nanobus": "^4.5.0",
"nest-access-control": "^3.1.0",
"nestjs-pino": "^3.5.0",
"node-cache": "^5.1.2",
"node-window-polyfill": "^1.0.2",
"openid-client": "^5.4.0",
"p-iteration": "^1.1.8",
"p-retry": "^4.6.2",
"passport-http-header-strategy": "^1.1.0",
"pidusage": "^3.0.2",
"pino": "^8.16.2",
"pino-http": "^8.5.1",
"pino-pretty": "^10.2.3",
"reflect-metadata": "^0.1.13",
"request": "^2.88.2",
"semver": "^7.4.0",
"stoppable": "^1.1.0",
"subscriptions-transport-ws": "^0.11.0",
"systeminformation": "^5.21.2",
"ts-command-line-args": "^2.5.0",
"uuid": "^9.0.0",
@@ -133,6 +146,7 @@
"@graphql-codegen/typescript-operations": "^4.0.0",
"@graphql-codegen/typescript-resolvers": "4.0.1",
"@graphql-typed-document-node/core": "^3.2.0",
"@nestjs/testing": "^10.2.10",
"@swc/core": "^1.3.81",
"@types/async-exit-hook": "^2.0.0",
"@types/btoa": "^1.2.3",
@@ -174,7 +188,6 @@
"graphql-codegen-typescript-validation-schema": "^0.11.0",
"ip-regex": "^5.0.0",
"json-difference": "^1.9.1",
"log4js": "^6.9.1",
"map-obj": "^5.0.2",
"p-props": "^5.0.0",
"path-exists": "^5.0.0",
@@ -182,7 +195,6 @@
"pkg": "^5.8.1",
"pretty-bytes": "^6.1.0",
"pretty-ms": "^8.0.0",
"serialize-error": "^11.0.2",
"standard-version": "^9.5.0",
"tsup": "^7.0.0",
"typescript": "^4.9.4",
@@ -16,34 +16,21 @@ vi.mock('@app/core/log', () => ({
error: vi.fn(),
debug: vi.fn(),
trace: vi.fn(),
addContext: vi.fn(),
removeContext: vi.fn(),
},
dashboardLogger: {
info: vi.fn(),
error: vi.fn((...input) => console.log(input)),
debug: vi.fn(),
trace: vi.fn(),
addContext: vi.fn(),
removeContext: vi.fn(),
},
emhttpLogger: {
info: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
trace: vi.fn(),
addContext: vi.fn(),
removeContext: vi.fn(),
},
}));

vi.mock('@app/common/two-factor', () => ({
checkTwoFactorEnabled: vi.fn(() => ({
isRemoteEnabled: false,
isLocalEnabled: false,
})),
}));

vi.mock('@app/common/dashboard/boot-timestamp', () => ({
bootTimestamp: new Date('2022-06-10T04:35:58.276Z'),
}));
@@ -77,7 +64,7 @@ test('Returns generated data', async () => {
"case": {
"base64": "",
"error": "",
"icon": "case-model.png",
"icon": "",
"url": "",
},
},
@@ -107,6 +94,8 @@ test('Returns generated data', async () => {
"flashGuid": "0000-0000-0000-000000000000",
"regState": "PRO",
"regTy": "PRO",
"serverDescription": "Dev Server",
"serverName": "Tower",
},
"versions": {
"unraid": "6.11.2",
@@ -1,360 +0,0 @@
|
||||
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
|
||||
|
||||
exports[`Returns default permissions 1`] = `
|
||||
{
|
||||
"admin": {
|
||||
"extends": "user",
|
||||
"permissions": [
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "apikey",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "array",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "cpu",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "crash-reporting-enabled",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "device",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "device/unassigned",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "disk",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "disk/settings",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "display",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "docker/container",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "docker/network",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "flash",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "info",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "license-key",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "machine-id",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "memory",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "notifications",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "online",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "os",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "owner",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "parity-history",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "permission",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "registration",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "servers",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "service",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "service/emhttpd",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "service/unraid-api",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "services",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "share",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "software-versions",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "unraid-version",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "uptime",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "user",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "vars",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "vms",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "vms/domain",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "vms/network",
|
||||
},
|
||||
],
|
||||
},
|
||||
"guest": {
|
||||
"permissions": [
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "me",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "welcome",
|
||||
},
|
||||
],
|
||||
},
|
||||
"my_servers": {
|
||||
"extends": "guest",
|
||||
"permissions": [
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "dashboard",
|
||||
},
|
||||
{
|
||||
"action": "read:own",
|
||||
"attributes": "*",
|
||||
"resource": "two-factor",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "array",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "docker/container",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "docker/network",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "notifications",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "vms/domain",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "unraid-version",
|
||||
},
|
||||
],
|
||||
},
|
||||
"notifier": {
|
||||
"extends": "guest",
|
||||
"permissions": [
|
||||
{
|
||||
"action": "create:own",
|
||||
"attributes": "*",
|
||||
"resource": "notifications",
|
||||
},
|
||||
],
|
||||
},
|
||||
"upc": {
|
||||
"extends": "guest",
|
||||
"permissions": [
|
||||
{
|
||||
"action": "read:own",
|
||||
"attributes": "*",
|
||||
"resource": "apikey",
|
||||
},
|
||||
{
|
||||
"action": "read:own",
|
||||
"attributes": "*",
|
||||
"resource": "cloud",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "config",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "crash-reporting-enabled",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "disk",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "display",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "flash",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "os",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "owner",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "permission",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "registration",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "servers",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "vars",
|
||||
},
|
||||
{
|
||||
"action": "read:own",
|
||||
"attributes": "*",
|
||||
"resource": "connect",
|
||||
},
|
||||
{
|
||||
"action": "update:own",
|
||||
"attributes": "*",
|
||||
"resource": "connect",
|
||||
},
|
||||
],
|
||||
},
|
||||
"user": {
|
||||
"extends": "guest",
|
||||
"permissions": [
|
||||
{
|
||||
"action": "read:own",
|
||||
"attributes": "*",
|
||||
"resource": "apikey",
|
||||
},
|
||||
{
|
||||
"action": "read:any",
|
||||
"attributes": "*",
|
||||
"resource": "permission",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
`;
|
||||
api/src/__test__/core/__snapshots__/permissions.test.ts.snap (normal file, 403 lines)
@@ -0,0 +1,403 @@
|
||||
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
|
||||
|
||||
exports[`Returns default permissions 1`] = `
|
||||
RolesBuilder {
|
||||
"_grants": {
|
||||
"admin": {
|
||||
"$extend": [
|
||||
"guest",
|
||||
],
|
||||
"apikey": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"array": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"cloud": {
|
||||
"read:own": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"config": {
|
||||
"update:own": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"connect": {
|
||||
"read:own": [
|
||||
"*",
|
||||
],
|
||||
"update:own": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"cpu": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"crash-reporting-enabled": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"customizations": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"device": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"device/unassigned": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"disk": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"disk/settings": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"display": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"docker/container": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"docker/network": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"flash": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"info": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"license-key": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"logs": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"machine-id": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"memory": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"notifications": {
|
||||
"create:any": [
|
||||
"*",
|
||||
],
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"online": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"os": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"owner": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"parity-history": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"permission": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"registration": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"servers": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"service": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"service/emhttpd": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"service/unraid-api": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"services": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"share": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"software-versions": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"unraid-version": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"uptime": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"user": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"vars": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"vms": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"vms/domain": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"vms/network": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
},
|
||||
"guest": {
|
||||
"me": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"welcome": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
},
|
||||
"my_servers": {
|
||||
"$extend": [
|
||||
"guest",
|
||||
],
|
||||
"array": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"customizations": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"dashboard": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"docker/container": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"docker/network": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"logs": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"notifications": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"unraid-version": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"vms": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"vms/domain": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
},
|
||||
"notifier": {
|
||||
"$extend": [
|
||||
"guest",
|
||||
],
|
||||
"notifications": {
|
||||
"create:own": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
},
|
||||
"upc": {
|
||||
"$extend": [
|
||||
"guest",
|
||||
],
|
||||
"apikey": {
|
||||
"read:own": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"cloud": {
|
||||
"read:own": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"config": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
"update:own": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"connect": {
|
||||
"read:own": [
|
||||
"*",
|
||||
],
|
||||
"update:own": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"crash-reporting-enabled": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"customizations": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"disk": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"display": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"flash": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"info": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"logs": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"os": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"owner": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"permission": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"registration": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"servers": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
"vars": {
|
||||
"read:any": [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
"_isLocked": false,
|
||||
}
|
||||
`;
|
||||
api/src/__test__/core/permissions.test.ts (normal file, 6 lines)
@@ -0,0 +1,6 @@
import { expect, test } from 'vitest';
import { setupPermissions } from '@app/core/permissions';

test('Returns default permissions', () => {
expect(setupPermissions()).toMatchSnapshot();
});
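The snapshot added above records a nest-access-control RolesBuilder. A minimal sketch of how setupPermissions() could construct such grants (an assumption for illustration; the committed implementation at @app/core/permissions is not shown in this diff):

import { RolesBuilder } from 'nest-access-control';

// Hypothetical sketch: build role grants programmatically instead of the
// static role/permission map removed elsewhere in this commit.
export const setupPermissions = (): RolesBuilder => {
    const roles = new RolesBuilder();

    // Base role that every other role extends.
    roles.grant('guest').readAny('me').readAny('welcome');

    // Admin extends guest and gains read access to additional resources.
    roles
        .grant('admin')
        .extend('guest')
        .readAny('apikey')
        .readAny('array')
        .readAny('unraid-version');

    return roles;
};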
api/src/__test__/core/utils/files/config-file-normalizer.test.ts (normal file, 167 lines)
@@ -0,0 +1,167 @@
|
||||
import { test, expect } from 'vitest';
|
||||
import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer';
|
||||
import { initialState } from '@app/store/modules/config';
|
||||
import { cloneDeep } from 'lodash';
|
||||
|
||||
test('it creates a FLASH config with NO OPTIONAL values', () => {
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "",
|
||||
"version": "",
|
||||
},
|
||||
"local": {},
|
||||
"notifier": {
|
||||
"apikey": "",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
"upc": {
|
||||
"apikey": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('it creates a MEMORY config with NO OPTIONAL values', () => {
|
||||
const basicConfig = initialState;
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "",
|
||||
"version": "",
|
||||
},
|
||||
"connectionStatus": {
|
||||
"minigraph": "PRE_INIT",
|
||||
},
|
||||
"local": {},
|
||||
"notifier": {
|
||||
"apikey": "",
|
||||
},
|
||||
"remote": {
|
||||
"accesstoken": "",
|
||||
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://staging.connect.myunraid.net, https://dev-my.myunraid.net:4000",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
"upc": {
|
||||
"apikey": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('it creates a FLASH config with OPTIONAL values', () => {
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
basicConfig.local.showT2Fa = 'yes';
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'flash');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "myextra.origins",
|
||||
"version": "",
|
||||
},
|
||||
"local": {
|
||||
"2Fa": "yes",
|
||||
"showT2Fa": "yes",
|
||||
},
|
||||
"notifier": {
|
||||
"apikey": "",
|
||||
},
|
||||
"remote": {
|
||||
"2Fa": "yes",
|
||||
"accesstoken": "",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"upnpEnabled": "yes",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
"upc": {
|
||||
"apikey": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('it creates a MEMORY config with OPTIONAL values', () => {
|
||||
const basicConfig = cloneDeep(initialState);
|
||||
basicConfig.remote['2Fa'] = 'yes';
|
||||
basicConfig.local['2Fa'] = 'yes';
|
||||
basicConfig.local.showT2Fa = 'yes';
|
||||
basicConfig.api.extraOrigins = 'myextra.origins';
|
||||
basicConfig.remote.upnpEnabled = 'yes';
|
||||
basicConfig.connectionStatus.upnpStatus = 'Turned On';
|
||||
const config = getWriteableConfig(basicConfig, 'memory');
|
||||
expect(config).toMatchInlineSnapshot(`
|
||||
{
|
||||
"api": {
|
||||
"extraOrigins": "myextra.origins",
|
||||
"version": "",
|
||||
},
|
||||
"connectionStatus": {
|
||||
"minigraph": "PRE_INIT",
|
||||
"upnpStatus": "Turned On",
|
||||
},
|
||||
"local": {
|
||||
"2Fa": "yes",
|
||||
"showT2Fa": "yes",
|
||||
},
|
||||
"notifier": {
|
||||
"apikey": "",
|
||||
},
|
||||
"remote": {
|
||||
"2Fa": "yes",
|
||||
"accesstoken": "",
|
||||
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://staging.connect.myunraid.net, https://dev-my.myunraid.net:4000",
|
||||
"apikey": "",
|
||||
"avatar": "",
|
||||
"dynamicRemoteAccessType": "DISABLED",
|
||||
"email": "",
|
||||
"idtoken": "",
|
||||
"refreshtoken": "",
|
||||
"regWizTime": "",
|
||||
"upnpEnabled": "yes",
|
||||
"username": "",
|
||||
"wanaccess": "",
|
||||
"wanport": "",
|
||||
},
|
||||
"upc": {
|
||||
"apikey": "",
|
||||
},
|
||||
}
|
||||
`);
|
||||
});
|
||||
api/src/__test__/mothership/index.test.ts (normal file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
import { beforeEach, expect, test, vi } from 'vitest';
|
||||
|
||||
// Preloading imports for faster tests
|
||||
import '@app/mothership/utils/convert-to-fuzzy-time';
|
||||
|
||||
vi.mock('fs', () => ({
|
||||
default: {
|
||||
readFileSync: vi.fn().mockReturnValue('my-file'),
|
||||
writeFileSync: vi.fn(),
|
||||
existsSync: vi.fn(),
|
||||
},
|
||||
readFileSync: vi.fn().mockReturnValue('my-file'),
|
||||
existsSync: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('@graphql-tools/schema', () => ({
|
||||
makeExecutableSchema: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('@app/core/log', () => ({
|
||||
default: { relayLogger: { trace: vi.fn() } },
|
||||
relayLogger: { trace: vi.fn() },
|
||||
logger: { trace: vi.fn() },
|
||||
}));
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
const generateTestCases = () => {
|
||||
const cases: Array<{ min: number; max: number }> = [];
|
||||
for (let i = 0; i < 15; i += 1) {
|
||||
const min = Math.round(Math.random() * 100);
|
||||
const max = min + (Math.round(Math.random() * 20));
|
||||
cases.push({ min, max });
|
||||
}
|
||||
|
||||
return cases;
|
||||
};
|
||||
|
||||
test.each(generateTestCases())('Successfully converts to fuzzy time %o', async ({ min, max }) => {
|
||||
const { convertToFuzzyTime } = await import('@app/mothership/utils/convert-to-fuzzy-time');
|
||||
|
||||
const res = convertToFuzzyTime(min, max);
|
||||
expect(res).toBeGreaterThanOrEqual(min);
|
||||
expect(res).toBeLessThanOrEqual(max);
|
||||
});
|
||||
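The generated test cases above only pin down the contract of convertToFuzzyTime: the result must land between min and max, inclusive. One plausible implementation that satisfies it (an assumption, not the committed code):

// Sketch only; the real module lives at
// @app/mothership/utils/convert-to-fuzzy-time and may differ.
export const convertToFuzzyTime = (min: number, max: number): number =>
    Math.floor(Math.random() * (max - min + 1)) + min;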
@@ -75,15 +75,10 @@ export const getCloudData = async (
|
||||
const cloud = await client.query({ query: getCloudDocument });
|
||||
return cloud.data.cloud ?? null;
|
||||
} catch (error: unknown) {
|
||||
cliLogger.addContext(
|
||||
'error-stack',
|
||||
error instanceof Error ? error.stack : error
|
||||
);
|
||||
cliLogger.trace(
|
||||
'Failed fetching cloud from local graphql with "%s"',
|
||||
error instanceof Error ? error.message : 'Unknown Error'
|
||||
);
|
||||
cliLogger.removeContext('error-stack');
|
||||
|
||||
return null;
|
||||
}
|
||||
@@ -122,12 +117,10 @@ export const getServersData = async ({
|
||||
);
|
||||
return foundServers;
|
||||
} catch (error: unknown) {
|
||||
cliLogger.addContext('error', error);
|
||||
cliLogger.trace(
|
||||
'Failed fetching servers from local graphql with "%s"',
|
||||
error instanceof Error ? error.message : 'Unknown Error'
|
||||
);
|
||||
cliLogger.removeContext('error');
|
||||
return {
|
||||
online: [],
|
||||
offline: [],
|
||||
|
||||
@@ -8,65 +8,77 @@ import { getters } from '@app/store';
|
||||
const command = mainOptions.command as unknown as string;
|
||||
|
||||
export const main = async (...argv: string[]) => {
|
||||
cliLogger.addContext('envs', env);
|
||||
cliLogger.debug('Loading env file');
|
||||
cliLogger.removeContext('envs');
|
||||
cliLogger.debug(env, 'Loading env file');
|
||||
|
||||
// Set envs
|
||||
setEnv('LOG_TYPE', process.env.LOG_TYPE ?? (command === 'start' || mainOptions.debug ? 'pretty' : 'raw'));
|
||||
cliLogger.addContext('paths', getters.paths());
|
||||
cliLogger.debug('Starting CLI');
|
||||
cliLogger.removeContext('paths');
|
||||
// Set envs
|
||||
setEnv(
|
||||
'LOG_TYPE',
|
||||
process.env.LOG_TYPE ??
|
||||
(command === 'start' || mainOptions.debug ? 'pretty' : 'raw')
|
||||
);
|
||||
cliLogger.debug({ paths: getters.paths() }, 'Starting CLI');
|
||||
|
||||
setEnv('DEBUG', mainOptions.debug ?? false);
|
||||
setEnv('ENVIRONMENT', process.env.ENVIRONMENT ?? 'production');
|
||||
setEnv('PORT', process.env.PORT ?? mainOptions.port ?? '9000');
|
||||
setEnv('LOG_LEVEL', process.env.LOG_LEVEL ?? mainOptions['log-level'] ?? 'INFO');
|
||||
if (!process.env.LOG_TRANSPORT) {
|
||||
if (process.env.ENVIRONMENT === 'production' && !mainOptions.debug) {
|
||||
setEnv('LOG_TRANSPORT', 'file,errors');
|
||||
setEnv('LOG_LEVEL', 'DEBUG');
|
||||
} else if (!mainOptions.debug) {
|
||||
// Staging Environment, backgrounded plugin
|
||||
setEnv('LOG_TRANSPORT', 'file,errors');
|
||||
setEnv('LOG_LEVEL', 'TRACE');
|
||||
} else {
|
||||
cliLogger.debug('In Debug Mode - Log Level Defaulting to: stdout');
|
||||
}
|
||||
}
|
||||
setEnv('DEBUG', mainOptions.debug ?? false);
|
||||
setEnv('ENVIRONMENT', process.env.ENVIRONMENT ?? 'production');
|
||||
setEnv('PORT', process.env.PORT ?? mainOptions.port ?? '9000');
|
||||
setEnv(
|
||||
'LOG_LEVEL',
|
||||
process.env.LOG_LEVEL ?? mainOptions['log-level'] ?? 'INFO'
|
||||
);
|
||||
if (!process.env.LOG_TRANSPORT) {
|
||||
if (process.env.ENVIRONMENT === 'production' && !mainOptions.debug) {
|
||||
setEnv('LOG_TRANSPORT', 'file');
|
||||
setEnv('LOG_LEVEL', 'DEBUG');
|
||||
} else if (!mainOptions.debug) {
|
||||
// Staging Environment, backgrounded plugin
|
||||
setEnv('LOG_TRANSPORT', 'file');
|
||||
setEnv('LOG_LEVEL', 'TRACE');
|
||||
} else {
|
||||
cliLogger.debug('In Debug Mode - Log Level Defaulting to: stdout');
|
||||
}
|
||||
}
|
||||
|
||||
if (!command) {
|
||||
// Run help command
|
||||
parse<Flags>(args, { ...options, partial: true, stopAtFirstUnknown: true, argv: ['-h'] });
|
||||
}
|
||||
if (!command) {
|
||||
// Run help command
|
||||
parse<Flags>(args, {
|
||||
...options,
|
||||
partial: true,
|
||||
stopAtFirstUnknown: true,
|
||||
argv: ['-h'],
|
||||
});
|
||||
}
|
||||
|
||||
// Only import the command we need when we use it
|
||||
const commands = {
|
||||
start: import('@app/cli/commands/start').then(pkg => pkg.start),
|
||||
stop: import('@app/cli/commands/stop').then(pkg => pkg.stop),
|
||||
restart: import('@app/cli/commands/restart').then(pkg => pkg.restart),
|
||||
'switch-env': import('@app/cli/commands/switch-env').then(pkg => pkg.switchEnv),
|
||||
version: import('@app/cli/commands/version').then(pkg => pkg.version),
|
||||
status: import('@app/cli/commands/status').then(pkg => pkg.status),
|
||||
report: import('@app/cli/commands/report').then(pkg => pkg.report),
|
||||
'validate-token': import('@app/cli/commands/validate-token').then(pkg => pkg.validateToken),
|
||||
};
|
||||
// Only import the command we need when we use it
|
||||
const commands = {
|
||||
start: import('@app/cli/commands/start').then((pkg) => pkg.start),
|
||||
stop: import('@app/cli/commands/stop').then((pkg) => pkg.stop),
|
||||
restart: import('@app/cli/commands/restart').then((pkg) => pkg.restart),
|
||||
'switch-env': import('@app/cli/commands/switch-env').then(
|
||||
(pkg) => pkg.switchEnv
|
||||
),
|
||||
version: import('@app/cli/commands/version').then((pkg) => pkg.version),
|
||||
status: import('@app/cli/commands/status').then((pkg) => pkg.status),
|
||||
report: import('@app/cli/commands/report').then((pkg) => pkg.report),
|
||||
'validate-token': import('@app/cli/commands/validate-token').then(
|
||||
(pkg) => pkg.validateToken
|
||||
),
|
||||
};
|
||||
|
||||
// Unknown command
|
||||
if (!Object.keys(commands).includes(command)) {
|
||||
throw new Error(`Invalid command "${command}"`);
|
||||
}
|
||||
// Unknown command
|
||||
if (!Object.keys(commands).includes(command)) {
|
||||
throw new Error(`Invalid command "${command}"`);
|
||||
}
|
||||
|
||||
// Resolve the command import
|
||||
const commandMethod = await commands[command];
|
||||
// Resolve the command import
|
||||
const commandMethod = await commands[command];
|
||||
|
||||
// Run the command
|
||||
await commandMethod(...argv);
|
||||
// Run the command
|
||||
await commandMethod(...argv);
|
||||
|
||||
// Allow the process to exit
|
||||
// Don't exit when we start though
|
||||
if (!['start', 'restart'].includes(command)) {
|
||||
// Ensure process is exited
|
||||
process.exit(0);
|
||||
}
|
||||
// Allow the process to exit
|
||||
// Don't exit when we start though
|
||||
if (!['start', 'restart'].includes(command)) {
|
||||
// Ensure process is exited
|
||||
process.exit(0);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -3,7 +3,7 @@ import { uniq } from 'lodash';
import { getServerIps, getUrlForField } from '@app/graphql/resolvers/subscription/network';
import { FileLoadStatus } from '@app/store/types';
import { logger } from '../core';
import { ENVIRONMENT, INTROSPECTION } from '@app/environment';
import { GRAPHQL_INTROSPECTION } from '@app/environment';

const getAllowedSocks = (): string[] => [
// Notifier bridge
@@ -76,7 +76,7 @@ const getConnectOrigins = () : string[] => {
}

const getApolloSandbox = (): string[] => {
if (INTROSPECTION || ENVIRONMENT === 'development') {
if (GRAPHQL_INTROSPECTION) {
return ['https://studio.apollographql.com'];
}
return [];
@@ -1,58 +1,64 @@
|
||||
import { ConnectListAllDomainsFlags } from '@vmngr/libvirt';
|
||||
import { getHypervisor } from '@app/core/utils/vms/get-hypervisor';
|
||||
import display from '@app/graphql/resolvers/query/display';
|
||||
import { getUnraidVersion } from '@app/common/dashboard/get-unraid-version';
|
||||
import { getArray } from '@app/common/dashboard/get-array';
|
||||
import { bootTimestamp } from '@app/common/dashboard/boot-timestamp';
|
||||
import { dashboardLogger } from '@app/core/log';
|
||||
import { getters, store } from '@app/store';
|
||||
import { type DashboardServiceInput, type DashboardInput } from '@app/graphql/generated/client/graphql';
|
||||
import {
|
||||
type DashboardServiceInput,
|
||||
type DashboardInput,
|
||||
} from '@app/graphql/generated/client/graphql';
|
||||
import { API_VERSION } from '@app/environment';
|
||||
import { DynamicRemoteAccessType } from '@app/remoteAccess/types';
|
||||
import { DashboardInputSchema } from '@app/graphql/generated/client/validators';
|
||||
import { ZodError } from 'zod';
|
||||
|
||||
const getVmSummary = async (): Promise<DashboardInput['vms']> => {
|
||||
try {
|
||||
const hypervisor = await getHypervisor();
|
||||
if (!hypervisor) {
|
||||
return {
|
||||
installed: 0,
|
||||
started: 0,
|
||||
};
|
||||
}
|
||||
try {
|
||||
const hypervisor = await getHypervisor();
|
||||
if (!hypervisor) {
|
||||
return {
|
||||
installed: 0,
|
||||
started: 0,
|
||||
};
|
||||
}
|
||||
|
||||
const activeDomains = await hypervisor.connectListAllDomains(ConnectListAllDomainsFlags.ACTIVE) as unknown[];
|
||||
const inactiveDomains = await hypervisor.connectListAllDomains(ConnectListAllDomainsFlags.INACTIVE) as unknown[];
|
||||
return {
|
||||
installed: activeDomains.length + inactiveDomains.length,
|
||||
started: activeDomains.length,
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
installed: 0,
|
||||
started: 0,
|
||||
};
|
||||
}
|
||||
const activeDomains = (await hypervisor.connectListAllDomains(
|
||||
ConnectListAllDomainsFlags.ACTIVE
|
||||
)) as unknown[];
|
||||
const inactiveDomains = (await hypervisor.connectListAllDomains(
|
||||
ConnectListAllDomainsFlags.INACTIVE
|
||||
)) as unknown[];
|
||||
return {
|
||||
installed: activeDomains.length + inactiveDomains.length,
|
||||
started: activeDomains.length,
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
installed: 0,
|
||||
started: 0,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const getDynamicRemoteAccessService = (): DashboardServiceInput | null => {
|
||||
const { config, dynamicRemoteAccess } = store.getState();
|
||||
const enabledStatus = config.remote.dynamicRemoteAccessType;
|
||||
const { config, dynamicRemoteAccess } = store.getState();
|
||||
const enabledStatus = config.remote.dynamicRemoteAccessType;
|
||||
|
||||
return {
|
||||
name: 'dynamic-remote-access',
|
||||
online: enabledStatus !== DynamicRemoteAccessType.DISABLED,
|
||||
version: dynamicRemoteAccess.runningType,
|
||||
uptime: {
|
||||
timestamp: bootTimestamp.toISOString(),
|
||||
},
|
||||
};
|
||||
return {
|
||||
name: 'dynamic-remote-access',
|
||||
online: enabledStatus !== DynamicRemoteAccessType.DISABLED,
|
||||
version: dynamicRemoteAccess.runningType,
|
||||
uptime: {
|
||||
timestamp: bootTimestamp.toISOString(),
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const services = (): DashboardInput['services'] => {
|
||||
const dynamicRemoteAccess = getDynamicRemoteAccessService();
|
||||
return [
|
||||
const dynamicRemoteAccess = getDynamicRemoteAccessService();
|
||||
return [
|
||||
{
|
||||
name: 'unraid-api',
|
||||
online: true,
|
||||
@@ -66,63 +72,81 @@ const services = (): DashboardInput['services'] => {
|
||||
};
|
||||
|
||||
const getData = async (): Promise<DashboardInput> => {
|
||||
const emhttp = getters.emhttp();
|
||||
const docker = getters.docker();
|
||||
const emhttp = getters.emhttp();
|
||||
const docker = getters.docker();
|
||||
|
||||
return {
|
||||
vars: {
|
||||
regState: emhttp.var.regState,
|
||||
regTy: emhttp.var.regTy,
|
||||
flashGuid: emhttp.var.flashGuid,
|
||||
serverName: emhttp.var.name,
|
||||
serverDescription: emhttp.var.comment,
|
||||
},
|
||||
apps: {
|
||||
installed: docker.installed ?? 0,
|
||||
started: docker.running ?? 0
|
||||
},
|
||||
versions: {
|
||||
unraid: await getUnraidVersion(),
|
||||
},
|
||||
os: {
|
||||
hostname: emhttp.var.name,
|
||||
uptime: bootTimestamp.toISOString()
|
||||
},
|
||||
vms: await getVmSummary(),
|
||||
array: getArray(),
|
||||
services: services(),
|
||||
display: await display(),
|
||||
config: emhttp.var.configValid ? { valid: true } : {
|
||||
valid: false,
|
||||
error: {
|
||||
error: 'UNKNOWN_ERROR',
|
||||
invalid: 'INVALID',
|
||||
nokeyserver: 'NO_KEY_SERVER',
|
||||
withdrawn: 'WITHDRAWN',
|
||||
}[emhttp.var.configState] ?? 'UNKNOWN_ERROR',
|
||||
},
|
||||
};
|
||||
return {
|
||||
vars: {
|
||||
regState: emhttp.var.regState,
|
||||
regTy: emhttp.var.regTy,
|
||||
flashGuid: emhttp.var.flashGuid,
|
||||
serverName: emhttp.var.name,
|
||||
serverDescription: emhttp.var.comment,
|
||||
},
|
||||
apps: {
|
||||
installed: docker.installed ?? 0,
|
||||
started: docker.running ?? 0,
|
||||
},
|
||||
versions: {
|
||||
unraid: await getUnraidVersion(),
|
||||
},
|
||||
os: {
|
||||
hostname: emhttp.var.name,
|
||||
uptime: bootTimestamp.toISOString(),
|
||||
},
|
||||
vms: await getVmSummary(),
|
||||
array: getArray(),
|
||||
services: services(),
|
||||
display: {
|
||||
case: {
|
||||
url: '',
|
||||
icon: '',
|
||||
error: '',
|
||||
base64: '',
|
||||
},
|
||||
},
|
||||
config: emhttp.var.configValid
|
||||
? { valid: true }
|
||||
: {
|
||||
valid: false,
|
||||
error:
|
||||
{
|
||||
error: 'UNKNOWN_ERROR',
|
||||
invalid: 'INVALID',
|
||||
nokeyserver: 'NO_KEY_SERVER',
|
||||
withdrawn: 'WITHDRAWN',
|
||||
}[emhttp.var.configState] ?? 'UNKNOWN_ERROR',
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
export const generateData = async (): Promise<DashboardInput | null> => {
|
||||
const data = await getData();
|
||||
const data = await getData();
|
||||
|
||||
try {
|
||||
// Validate generated data
|
||||
// @TODO: Fix this runtype to use generated types from the Zod validators (as seen in mothership Codegen)
|
||||
const result = DashboardInputSchema().parse(data)
|
||||
try {
|
||||
// Validate generated data
|
||||
// @TODO: Fix this runtype to use generated types from the Zod validators (as seen in mothership Codegen)
|
||||
const result = DashboardInputSchema().parse(data);
|
||||
|
||||
return result
|
||||
return result;
|
||||
} catch (error: unknown) {
|
||||
// Log error for user
|
||||
if (error instanceof ZodError) {
|
||||
dashboardLogger.error(
|
||||
'Failed validation with issues: ',
|
||||
error.issues.map((issue) => ({
|
||||
message: issue.message,
|
||||
path: issue.path.join(','),
|
||||
}))
|
||||
);
|
||||
} else {
|
||||
dashboardLogger.error(
|
||||
'Failed validating dashboard object: ',
|
||||
error,
|
||||
data
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (error: unknown) {
|
||||
// Log error for user
|
||||
if (error instanceof ZodError) {
|
||||
dashboardLogger.error('Failed validation with issues: ' , error.issues.map(issue => ({ message: issue.message, path: issue.path.join(',') })))
|
||||
} else {
|
||||
dashboardLogger.error('Failed validating dashboard object: ', error, data);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
return null;
|
||||
};
|
||||
|
||||
|
||||
@@ -1,122 +0,0 @@
|
||||
export interface Permission { resource: string, action: string, attributes: string }
|
||||
export interface Role {
|
||||
permissions: Array<Permission>
|
||||
extends?: string;
|
||||
}
|
||||
|
||||
export const admin: Role = {
|
||||
extends: 'user',
|
||||
permissions: [
|
||||
// @NOTE: Uncomment the first line to enable creation of api keys.
|
||||
// See the README.md for more information.
|
||||
// @WARNING: This is currently unsupported, please be careful.
|
||||
// { resource: 'apikey', action: 'create:any', attributes: '*' },
|
||||
{ resource: 'apikey', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'array', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'cpu', action: 'read:any', attributes: '*' },
|
||||
{
|
||||
resource: 'crash-reporting-enabled',
|
||||
action: 'read:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{ resource: 'device', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'device/unassigned', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'disk', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'disk/settings', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'display', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'docker/container', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'docker/network', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'flash', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'info', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'license-key', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'machine-id', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'memory', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'notifications', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'online', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'os', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'owner', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'parity-history', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'permission', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'registration', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'servers', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'service', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'service/emhttpd', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'service/unraid-api', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'services', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'share', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'software-versions', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'unraid-version', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'uptime', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'user', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vars', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vms', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vms/domain', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vms/network', action: 'read:any', attributes: '*' },
|
||||
],
|
||||
};
|
||||
|
||||
export const user: Role = {
|
||||
extends: 'guest',
|
||||
permissions: [
|
||||
{ resource: 'apikey', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'permission', action: 'read:any', attributes: '*' },
|
||||
],
|
||||
};
|
||||
|
||||
export const upc: Role = {
|
||||
extends: 'guest',
|
||||
permissions: [
|
||||
{ resource: 'apikey', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'cloud', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'config', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'crash-reporting-enabled', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'disk', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'display', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'flash', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'os', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'owner', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'permission', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'registration', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'servers', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vars', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'connect', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'connect', action: 'update:own', attributes: '*' }
|
||||
],
|
||||
};
|
||||
|
||||
export const my_servers: Role = {
|
||||
extends: 'guest',
|
||||
permissions: [
|
||||
{ resource: 'dashboard', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'two-factor', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'array', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'docker/container', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'docker/network', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'notifications', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vms/domain', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'unraid-version', action: 'read:any', attributes: '*' },
|
||||
],
|
||||
};
|
||||
|
||||
export const notifier: Role = {
|
||||
extends: 'guest',
|
||||
permissions: [
|
||||
{ resource: 'notifications', action: 'create:own', attributes: '*' },
|
||||
],
|
||||
};
|
||||
|
||||
export const guest: Role = {
|
||||
permissions: [
|
||||
{ resource: 'me', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'welcome', action: 'read:any', attributes: '*' },
|
||||
],
|
||||
};
|
||||
|
||||
export const permissions: Record<string, Role> = {
|
||||
guest,
|
||||
user,
|
||||
admin,
|
||||
upc,
|
||||
my_servers,
|
||||
notifier,
|
||||
};
|
||||
@@ -1,137 +1,108 @@
|
||||
import chalk from 'chalk';
|
||||
import { configure, getLogger } from 'log4js';
|
||||
import { serializeError } from 'serialize-error';
|
||||
import { pino } from 'pino';
|
||||
import { LOG_TRANSPORT, LOG_TYPE } from '@app/environment';
|
||||
|
||||
export const levels = ['ALL', 'TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL', 'MARK', 'OFF'] as const;
|
||||
import pretty from 'pino-pretty';
|
||||
|
||||
const contextEnabled = Boolean(process.env.LOG_CONTEXT);
|
||||
const stackEnabled = Boolean(process.env.LOG_STACKTRACE);
|
||||
const tracingEnabled = Boolean(process.env.LOG_TRACING);
|
||||
const fullLoggingPattern = chalk`{gray [%d]} %x\{id\} %[[%p]%] %[[%c]%] %m{gray %x\{context\}}${tracingEnabled ? ' %[%f:%l%]' : ''}`;
|
||||
const minimumLoggingPattern = '%m';
|
||||
const appenders = process.env.LOG_TRANSPORT?.split(',').map(transport => transport.trim()) ?? ['out'];
|
||||
const level = levels[levels.indexOf(process.env.LOG_LEVEL?.toUpperCase() as typeof levels[number])] ?? 'INFO';
|
||||
const logLayout = {
|
||||
type: 'pattern',
|
||||
// Depending on what this env is set to we'll either get raw or pretty logs
|
||||
// The reason we do this is to allow the app to change this value
|
||||
// This way pretty logs can be turned off programmatically
|
||||
pattern: process.env.LOG_TYPE === 'pretty' ? fullLoggingPattern : minimumLoggingPattern,
|
||||
tokens: {
|
||||
id() {
|
||||
return chalk`{gray [${process.pid}]}`;
|
||||
},
|
||||
context({ context }: { context?: any }) {
|
||||
if (!contextEnabled || !context) {
|
||||
return '';
|
||||
}
|
||||
export const levels = [
|
||||
'trace',
|
||||
'debug',
|
||||
'info',
|
||||
'warn',
|
||||
'error',
|
||||
'fatal',
|
||||
] as const;
|
||||
|
||||
try {
|
||||
const contextEntries = Object.entries(context)
|
||||
.map(([key, value]) => [key, value instanceof Error ? (stackEnabled ? serializeError(value) : value) : value])
|
||||
.filter(([key]) => key !== 'pid');
|
||||
const cleanContext = Object.fromEntries(contextEntries);
|
||||
return `\n${Object.entries(cleanContext).map(([key, value]) => `${key}=${JSON.stringify(value, null, 2)}`).join(' ')}`;
|
||||
} catch (error: unknown) {
|
||||
const errorInfo = error instanceof Error ? `${error.message}: ${error.stack ?? 'no stack'}` : 'Error not instance of error';
|
||||
return `Error generating context: ${errorInfo}`;
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
const level =
|
||||
levels[
|
||||
levels.indexOf(
|
||||
process.env.LOG_LEVEL?.toLowerCase() as (typeof levels)[number]
|
||||
)
|
||||
] ?? 'info';
|
||||
|
||||
if (process.env.NODE_ENV !== 'test') {
|
||||
// We log to both the stdout and log file
|
||||
// The log file should be changed to errors only unless in debug mode
|
||||
configure({
|
||||
appenders: {
|
||||
file: {
|
||||
type: 'file',
|
||||
filename: '/var/log/unraid-api/stdout.log',
|
||||
maxLogSize: 10_000_000,
|
||||
backups: 0,
|
||||
layout: {
|
||||
...logLayout,
|
||||
// File logs should always be pretty
|
||||
pattern: fullLoggingPattern,
|
||||
},
|
||||
},
|
||||
errorFile: {
|
||||
type: 'file',
|
||||
filename: '/var/log/unraid-api/stderr.log',
|
||||
maxLogSize: 2_500_000,
|
||||
backups: 0,
|
||||
layout: {
|
||||
...logLayout,
|
||||
// File logs should always be pretty
|
||||
pattern: fullLoggingPattern,
|
||||
},
|
||||
},
|
||||
out: {
|
||||
type: 'stdout',
|
||||
layout: logLayout,
|
||||
},
|
||||
errors: { type: 'logLevelFilter', appender: 'errorFile', level: 'error' },
|
||||
},
|
||||
categories: {
|
||||
default: {
|
||||
appenders,
|
||||
level,
|
||||
enableCallStack: tracingEnabled,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
const logDestination =
|
||||
LOG_TRANSPORT === 'file' ? pino.destination('/var/log/unraid-api/stdout.log') : 1;
|
||||
|
||||
export const internalLogger = getLogger('internal');
|
||||
export const logger = getLogger('app');
|
||||
export const mothershipLogger = getLogger('mothership');
|
||||
export const dashboardLogger = getLogger('dashboard');
|
||||
export const emhttpLogger = getLogger('emhttp');
|
||||
export const libvirtLogger = getLogger('libvirt');
|
||||
export const graphqlLogger = getLogger('graphql');
|
||||
export const dockerLogger = getLogger('docker');
|
||||
export const cliLogger = getLogger('cli');
|
||||
export const minigraphLogger = getLogger('minigraph');
|
||||
export const cloudConnectorLogger = getLogger('cloud-connector');
|
||||
export const upnpLogger = getLogger('upnp');
|
||||
export const keyServerLogger = getLogger('key-server');
|
||||
export const remoteAccessLogger = getLogger('remote-access');
|
||||
export const remoteQueryLogger = getLogger('remote-query');
|
||||
const stream =
|
||||
LOG_TYPE === 'pretty'
|
||||
? pretty({
|
||||
singleLine: true,
|
||||
hideObject: false,
|
||||
colorize: true,
|
||||
ignore: 'time,hostname,pid',
|
||||
destination: logDestination,
|
||||
})
|
||||
: pino.destination(logDestination);
|
||||
|
||||
export const logger = pino(
|
||||
{
|
||||
level,
|
||||
timestamp: () => `,"time":"${new Date().toISOString()}"`,
|
||||
formatters: {
|
||||
level: (label: string) => ({ level: label }),
|
||||
},
|
||||
},
|
||||
stream
|
||||
);
|
||||
|
||||
export const internalLogger = logger.child({ logger: 'internal' });
|
||||
export const appLogger = logger.child({ logger: 'app' });
|
||||
export const mothershipLogger = logger.child({ logger: 'mothership' });
|
||||
export const dashboardLogger = logger.child({ logger: 'dashboard' });
|
||||
export const emhttpLogger = logger.child({ logger: 'emhttp' });
|
||||
export const libvirtLogger = logger.child({ logger: 'libvirt' });
|
||||
export const graphqlLogger = logger.child({ logger: 'graphql' });
|
||||
export const dockerLogger = logger.child({ logger: 'docker' });
|
||||
export const cliLogger = logger.child({ logger: 'cli' });
|
||||
export const minigraphLogger = logger.child({ logger: 'minigraph' });
|
||||
export const cloudConnectorLogger = logger.child({ logger: 'cloud-connector' });
|
||||
export const upnpLogger = logger.child({ logger: 'upnp' });
|
||||
export const keyServerLogger = logger.child({ logger: 'key-server' });
|
||||
export const remoteAccessLogger = logger.child({ logger: 'remote-access' });
|
||||
export const remoteQueryLogger = logger.child({ logger: 'remote-query' });
|
||||
export const apiLogger = logger.child({ logger: 'api' });
|
||||
|
||||
export const loggers = [
|
||||
logger,
|
||||
mothershipLogger,
|
||||
dashboardLogger,
|
||||
emhttpLogger,
|
||||
libvirtLogger,
|
||||
graphqlLogger,
|
||||
dockerLogger,
|
||||
cliLogger,
|
||||
minigraphLogger,
|
||||
cloudConnectorLogger,
|
||||
upnpLogger,
|
||||
keyServerLogger,
|
||||
remoteAccessLogger,
|
||||
remoteQueryLogger,
|
||||
internalLogger,
|
||||
appLogger,
|
||||
mothershipLogger,
|
||||
dashboardLogger,
|
||||
emhttpLogger,
|
||||
libvirtLogger,
|
||||
graphqlLogger,
|
||||
dockerLogger,
|
||||
cliLogger,
|
||||
minigraphLogger,
|
||||
cloudConnectorLogger,
|
||||
upnpLogger,
|
||||
keyServerLogger,
|
||||
remoteAccessLogger,
|
||||
remoteQueryLogger,
|
||||
apiLogger
|
||||
];
|
||||
|
||||
// Send SIGUSR1 to increase log level
|
||||
process.on('SIGUSR1', () => {
|
||||
const level = typeof logger.level === 'string' ? logger.level : logger.level.levelStr;
|
||||
const nextLevel = levels[levels.findIndex(_level => _level === level) + 1] ?? levels[0];
|
||||
loggers.forEach(logger => {
|
||||
logger.level = nextLevel;
|
||||
});
|
||||
internalLogger.mark('Log level changed from %s to %s', level, nextLevel);
|
||||
const level = logger.level;
|
||||
const nextLevel =
|
||||
levels[levels.findIndex((_level) => _level === level) + 1] ?? levels[0];
|
||||
loggers.forEach((logger) => {
|
||||
logger.level = nextLevel;
|
||||
});
|
||||
internalLogger.info({
|
||||
message: `Log level changed from ${level} to ${nextLevel}`,
|
||||
});
|
||||
});
|
||||
|
||||
// Send SIGUSR2 to decrease log level
|
||||
process.on('SIGUSR2', () => {
|
||||
const level = typeof logger.level === 'string' ? logger.level : logger.level.levelStr;
|
||||
const nextLevel = levels[levels.findIndex(_level => _level === level) - 1] ?? levels[levels.length - 1];
|
||||
loggers.forEach(logger => {
|
||||
logger.level = nextLevel;
|
||||
});
|
||||
internalLogger.mark('Log level changed from %s to %s', level, nextLevel);
|
||||
const level = logger.level;
|
||||
const nextLevel =
|
||||
levels[levels.findIndex((_level) => _level === level) - 1] ??
|
||||
levels[levels.length - 1];
|
||||
loggers.forEach((logger) => {
|
||||
logger.level = nextLevel;
|
||||
});
|
||||
internalLogger.info({
|
||||
message: `Log level changed from ${level} to ${nextLevel}`,
|
||||
});
|
||||
});
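The two signal handlers above let an operator step the log level of a running API without a restart: SIGUSR1 moves to the next entry in levels, SIGUSR2 to the previous one, wrapping at either end. A hedged sketch of driving that from another Node process; the pgrep-based PID lookup is illustrative only:

    import { execSync } from 'node:child_process';

    // Hypothetical helper: signal the running unraid-api process to step its log level.
    const stepLogLevel = (pid: number, direction: 'up' | 'down') => {
        process.kill(pid, direction === 'up' ? 'SIGUSR1' : 'SIGUSR2');
    };

    // Assumes a single process whose command line contains "unraid-api".
    const pid = Number(execSync('pgrep -f unraid-api').toString().trim().split('\n')[0]);
    stepLogLevel(pid, 'up');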
|
||||
|
||||
20
api/src/core/logrotate/setup-logrotate.ts
Normal file
20
api/src/core/logrotate/setup-logrotate.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { writeFile } from 'fs/promises';
|
||||
import { fileExists } from '@app/core/utils/files/file-exists';
|
||||
|
||||
export const setupLogRotation = async () => {
|
||||
if (await fileExists('/etc/logrotate.d/unraid-api')) {
|
||||
return;
|
||||
} else {
|
||||
await writeFile(
|
||||
'/etc/logrotate.d/unraid-api',
|
||||
`
|
||||
/var/log/unraid-api/*.log {
|
||||
rotate 2
|
||||
missingok
|
||||
size 5M
|
||||
}
|
||||
`,
|
||||
{ mode: '644' }
|
||||
);
|
||||
}
|
||||
};
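For context, a minimal sketch of how setupLogRotation might be invoked during API start-up; the call site is an assumption, but the file it writes is exactly the snippet above (keep 2 rotations, tolerate missing files, rotate once a log exceeds 5M):

    import { setupLogRotation } from '@app/core/logrotate/setup-logrotate';

    // Idempotent: returns early if /etc/logrotate.d/unraid-api already exists,
    // otherwise writes the config above with mode 644 so logrotate picks it up.
    await setupLogRotation();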
|
||||
@@ -1,126 +0,0 @@
|
||||
// import fs from 'fs';
|
||||
// import { log } from '../log';
|
||||
import type { CoreContext, CoreResult } from '@app/core/types';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
import { NotImplementedError } from '@app/core/errors/not-implemented-error';
|
||||
import { AppError } from '@app/core/errors/app-error';
|
||||
import { getters } from '@app/store';
|
||||
|
||||
interface Context extends CoreContext {
|
||||
data: {
|
||||
keyUri?: string;
|
||||
trial?: boolean;
|
||||
replacement?: boolean;
|
||||
email?: string;
|
||||
keyFile?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface Result extends CoreResult {
|
||||
json: {
|
||||
key?: string;
|
||||
type?: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a license key.
|
||||
*/
|
||||
export const addLicenseKey = async (context: Context): Promise<Result | void> => {
|
||||
ensurePermission(context.user, {
|
||||
resource: 'license-key',
|
||||
action: 'create',
|
||||
possession: 'any',
|
||||
});
|
||||
|
||||
// Const { data } = context;
|
||||
const emhttp = getters.emhttp();
|
||||
const guid = emhttp.var.regGuid;
|
||||
// Const timestamp = new Date();
|
||||
|
||||
if (!guid) {
|
||||
throw new AppError('guid missing');
|
||||
}
|
||||
|
||||
throw new NotImplementedError();
|
||||
|
||||
// // Connect to unraid.net to request a trial key
|
||||
// if (data?.trial) {
|
||||
// const body = new FormData();
|
||||
// body.append('guid', guid);
|
||||
// body.append('timestamp', timestamp.getTime().toString());
|
||||
|
||||
// const key = await got('https://keys.lime-technology.com/account/trial', { method: 'POST', body })
|
||||
// .then(response => JSON.parse(response.body))
|
||||
// .catch(error => {
|
||||
// log.error(error);
|
||||
// throw new AppError(`Sorry, a HTTP ${error.status} error occurred while registering USB Flash GUID ${guid}`);
|
||||
// });
|
||||
|
||||
// // Update the trial key file
|
||||
// await fs.promises.writeFile('/boot/config/Trial.key', Buffer.from(key, 'base64'));
|
||||
|
||||
// return {
|
||||
// text: 'Thank you for registering, your trial key has been accepted.',
|
||||
// json: {
|
||||
// key
|
||||
// }
|
||||
// };
|
||||
// }
|
||||
|
||||
// // Connect to unraid.net to request a new replacement key
|
||||
// if (data?.replacement) {
|
||||
// const { email, keyFile } = data;
|
||||
|
||||
// if (!email || !keyFile) {
|
||||
// throw new AppError('email or keyFile is missing');
|
||||
// }
|
||||
|
||||
// const body = new FormData();
|
||||
// body.append('guid', guid);
|
||||
// body.append('timestamp', timestamp.getTime().toString());
|
||||
// body.append('email', email);
|
||||
// body.append('keyfile', keyFile);
|
||||
|
||||
// const { body: key } = await got('https://keys.lime-technology.com/account/license/transfer', { method: 'POST', body })
|
||||
// .then(response => JSON.parse(response.body))
|
||||
// .catch(error => {
|
||||
// log.error(error);
|
||||
// throw new AppError(`Sorry, a HTTP ${error.status} error occurred while issuing a replacement for USB Flash GUID ${guid}`);
|
||||
// });
|
||||
|
||||
// // Update the trial key file
|
||||
// await fs.promises.writeFile('/boot/config/Trial.key', Buffer.from(key, 'base64'));
|
||||
|
||||
// return {
|
||||
// text: 'Thank you for registering, your trial key has been registered.',
|
||||
// json: {
|
||||
// key
|
||||
// }
|
||||
// };
|
||||
// }
|
||||
|
||||
// // Register a new server
|
||||
// if (data?.keyUri) {
|
||||
// const parts = data.keyUri.split('.key')[0].split('/');
|
||||
// const { [parts.length - 1]: keyType } = parts;
|
||||
|
||||
// // Download key blob
|
||||
// const { body: key } = await got(data.keyUri)
|
||||
// .then(response => JSON.parse(response.body))
|
||||
// .catch(error => {
|
||||
// log.error(error);
|
||||
// throw new AppError(`Sorry, a HTTP ${error.status} error occurred while registering your key for USB Flash GUID ${guid}`);
|
||||
// });
|
||||
|
||||
// // Save key file
|
||||
// await fs.promises.writeFile(`/boot/config/${keyType}.key`, Buffer.from(key, 'base64'));
|
||||
|
||||
// return {
|
||||
// text: `Thank you for registering, your ${keyType} key has been accepted.`,
|
||||
// json: {
|
||||
// type: keyType
|
||||
// }
|
||||
// };
|
||||
// }
|
||||
};
|
||||
@@ -23,7 +23,6 @@ interface Context extends CoreContext {
|
||||
*/
|
||||
export const addUser = async (context: Context): Promise<CoreResult> => {
|
||||
const { data } = context;
|
||||
|
||||
// Check permissions
|
||||
ensurePermission(context.user, {
|
||||
resource: 'user',
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import camelCaseKeys from 'camelcase-keys';
|
||||
import { docker, ensurePermission } from '@app/core/utils';
|
||||
import { docker } from '@app/core/utils';
|
||||
import { type CoreContext, type CoreResult } from '@app/core/types';
|
||||
import { catchHandlers } from '@app/core/utils/misc/catch-handlers';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
|
||||
export const getDockerNetworks = async (context: CoreContext): Promise<CoreResult> => {
|
||||
const { user } = context;
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
import { store } from '@app/store';
|
||||
import { type QueryResolvers } from '@app/graphql/generated/api/types';
|
||||
import { getArrayData } from '@app/core/modules/array/get-array-data';
|
||||
|
||||
/**
|
||||
* Get array info.
|
||||
* @returns Array state and array/disk capacity.
|
||||
*/
|
||||
export const getArray: QueryResolvers['array'] = (
|
||||
_,
|
||||
__,
|
||||
context
|
||||
) => {
|
||||
const { user } = context;
|
||||
|
||||
// Check permissions
|
||||
ensurePermission(user, {
|
||||
resource: 'array',
|
||||
action: 'read',
|
||||
possession: 'any',
|
||||
});
|
||||
|
||||
return getArrayData(store.getState);
|
||||
};
|
||||
@@ -86,18 +86,8 @@ const parseDisk = async (
|
||||
* Get all disks.
|
||||
*/
|
||||
export const getDisks = async (
|
||||
context: Context,
|
||||
options?: { temperature: boolean }
|
||||
): Promise<Disk[]> => {
|
||||
const { user } = context;
|
||||
|
||||
// Check permissions
|
||||
ensurePermission(user, {
|
||||
resource: 'disk',
|
||||
action: 'read',
|
||||
possession: 'any',
|
||||
});
|
||||
|
||||
// Return all fields but temperature
|
||||
if (options?.temperature === false) {
|
||||
const partitions = await blockDevices().then((devices) =>
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
import { AppError } from '@app/core/errors/app-error';
|
||||
import type { CoreResult, CoreContext } from '@app/core/types';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
|
||||
/**
|
||||
* Get all unassigned devices.
|
||||
*/
|
||||
export const getUnassignedDevices = async (context: CoreContext): Promise<CoreResult> => {
|
||||
const { user } = context;
|
||||
|
||||
// Bail if the user doesn't have permission
|
||||
ensurePermission(user, {
|
||||
resource: 'devices/unassigned',
|
||||
action: 'read',
|
||||
possession: 'any',
|
||||
});
|
||||
|
||||
const devices = [];
|
||||
|
||||
if (devices.length === 0) {
|
||||
throw new AppError('No devices found.', 404);
|
||||
}
|
||||
|
||||
return {
|
||||
text: `Unassigned devices: ${JSON.stringify(devices, null, 2)}`,
|
||||
json: devices,
|
||||
};
|
||||
};
|
||||
@@ -1,26 +0,0 @@
|
||||
import type { CoreContext, CoreResult } from '@app/core/types';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
import { getters } from '@app/store';
|
||||
|
||||
/**
|
||||
* Get all system vars.
|
||||
*/
|
||||
export const getVars = async (context: CoreContext): Promise<CoreResult> => {
|
||||
const { user } = context;
|
||||
|
||||
// Bail if the user doesn't have permission
|
||||
ensurePermission(user, {
|
||||
resource: 'vars',
|
||||
action: 'read',
|
||||
possession: 'any',
|
||||
});
|
||||
|
||||
const emhttp = getters.emhttp();
|
||||
|
||||
return {
|
||||
text: `Vars: ${JSON.stringify(emhttp.var, null, 2)}`,
|
||||
json: {
|
||||
...emhttp.var,
|
||||
},
|
||||
};
|
||||
};
|
||||
23
api/src/core/modules/index.ts
Normal file
23
api/src/core/modules/index.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
// Created from 'create-ts-index'
|
||||
|
||||
export * from './array';
|
||||
export * from './debug';
|
||||
export * from './disks';
|
||||
export * from './docker';
|
||||
export * from './services';
|
||||
export * from './settings';
|
||||
export * from './shares';
|
||||
export * from './users';
|
||||
export * from './vms';
|
||||
export * from './add-share';
|
||||
export * from './add-user';
|
||||
export * from './get-all-shares';
|
||||
export * from './get-apps';
|
||||
export * from './get-devices';
|
||||
export * from './get-disks';
|
||||
export * from './get-me';
|
||||
export * from './get-parity-history';
|
||||
export * from './get-permissions';
|
||||
export * from './get-services';
|
||||
export * from './get-users';
|
||||
export * from './get-welcome';
|
||||
@@ -1,7 +1,7 @@
|
||||
import { execa } from 'execa';
|
||||
import { ensurePermission } from '@app/core/utils';
|
||||
import { type CoreContext, type CoreResult } from '@app/core/types';
|
||||
import { cleanStdout } from '@app/core/utils/misc/clean-stdout';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
|
||||
interface Result extends CoreResult {
|
||||
json: {
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import type { CoreContext, CoreResult } from '@app/core/types/global';
|
||||
import type { UserShare, DiskShare } from '@app/core/types/states/share';
|
||||
import { AppError } from '@app/core/errors/app-error';
|
||||
import { getShares, ensurePermission } from '@app/core/utils';
|
||||
import { getShares } from '@app/core/utils';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
|
||||
interface Context extends CoreContext {
|
||||
params: {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { ConnectListAllDomainsFlags } from '@vmngr/libvirt';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
import { getHypervisor } from '@app/core/utils/vms/get-hypervisor';
|
||||
import { VmState, type VmDomain, type VmsResolvers } from '@app/graphql/generated/api/types';
|
||||
import { VmState, type VmDomain } from '@app/graphql/generated/api/types';
|
||||
import { GraphQLError } from 'graphql';
|
||||
|
||||
const states = {
|
||||
@@ -18,19 +17,7 @@ const states = {
|
||||
/**
|
||||
* Get vm domains.
|
||||
*/
|
||||
export const domainResolver: VmsResolvers['domain'] = async (
|
||||
_,
|
||||
__,
|
||||
context
|
||||
) => {
|
||||
const { user } = context;
|
||||
|
||||
// Check permissions
|
||||
ensurePermission(user, {
|
||||
resource: 'vms/domain',
|
||||
action: 'read',
|
||||
possession: 'any',
|
||||
});
|
||||
export const getDomains = async () => {
|
||||
|
||||
try {
|
||||
const hypervisor = await getHypervisor();
|
||||
|
||||
@@ -1,33 +1,190 @@
|
||||
import { logger } from '@app/core/log';
|
||||
import { permissions as defaultPermissions } from '@app/core/default-permissions';
|
||||
import { AccessControl } from 'accesscontrol';
|
||||
import { apiLogger } from '@app/core/log';
|
||||
import { RolesBuilder } from 'nest-access-control';
|
||||
|
||||
export interface Permission {
|
||||
role?: string;
|
||||
resource: string;
|
||||
action: string;
|
||||
attributes: string;
|
||||
}
|
||||
export interface Role {
|
||||
permissions: Array<Permission>;
|
||||
extends?: string;
|
||||
}
|
||||
|
||||
// Use built in permissions
|
||||
const getPermissions = () => defaultPermissions;
|
||||
|
||||
// Build permissions array
|
||||
const roles = getPermissions();
|
||||
const permissions = Object.entries(roles).flatMap(([roleName, role]) => [
|
||||
...(role?.permissions ?? []).map(permission => ({
|
||||
...permission,
|
||||
role: roleName,
|
||||
})),
|
||||
]);
|
||||
|
||||
// Grant permissions
|
||||
const ac = new AccessControl(permissions);
|
||||
|
||||
// Extend roles
|
||||
Object.entries(getPermissions()).forEach(([roleName, role]) => {
|
||||
if (role.extends) {
|
||||
ac.extendRole(roleName, role.extends);
|
||||
}
|
||||
});
|
||||
|
||||
logger.addContext('permissions', permissions);
|
||||
logger.trace('Loaded permissions');
|
||||
logger.removeContext('permissions');
|
||||
|
||||
export {
|
||||
ac,
|
||||
const roles: Record<string, Role> = {
|
||||
admin: {
|
||||
extends: 'guest',
|
||||
permissions: [
|
||||
{ resource: 'apikey', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'cloud', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'config', action: 'update:own', attributes: '*' },
|
||||
{ resource: 'connect', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'connect', action: 'update:own', attributes: '*' },
|
||||
{ resource: 'customizations', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'array', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'cpu', action: 'read:any', attributes: '*' },
|
||||
{
|
||||
resource: 'crash-reporting-enabled',
|
||||
action: 'read:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{ resource: 'device', action: 'read:any', attributes: '*' },
|
||||
{
|
||||
resource: 'device/unassigned',
|
||||
action: 'read:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{ resource: 'disk', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'disk/settings', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'display', action: 'read:any', attributes: '*' },
|
||||
{
|
||||
resource: 'docker/container',
|
||||
action: 'read:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{ resource: 'docker/network', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'flash', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'info', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'license-key', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'logs', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'machine-id', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'memory', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'notifications', action: 'read:any', attributes: '*' },
|
||||
{
|
||||
resource: 'notifications',
|
||||
action: 'create:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{ resource: 'online', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'os', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'owner', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'parity-history', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'permission', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'registration', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'servers', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'service', action: 'read:any', attributes: '*' },
|
||||
{
|
||||
resource: 'service/emhttpd',
|
||||
action: 'read:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{
|
||||
resource: 'service/unraid-api',
|
||||
action: 'read:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{ resource: 'services', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'share', action: 'read:any', attributes: '*' },
|
||||
{
|
||||
resource: 'software-versions',
|
||||
action: 'read:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{ resource: 'unraid-version', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'uptime', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'user', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vars', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vms', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vms/domain', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vms/network', action: 'read:any', attributes: '*' },
|
||||
],
|
||||
},
|
||||
upc: {
|
||||
extends: 'guest',
|
||||
permissions: [
|
||||
{ resource: 'apikey', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'cloud', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'config', action: 'read:any', attributes: '*' },
|
||||
{
|
||||
resource: 'crash-reporting-enabled',
|
||||
action: 'read:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{ resource: 'customizations', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'disk', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'display', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'flash', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'info', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'logs', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'os', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'owner', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'permission', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'registration', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'servers', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vars', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'config', action: 'update:own', attributes: '*' },
|
||||
{ resource: 'connect', action: 'read:own', attributes: '*' },
|
||||
{ resource: 'connect', action: 'update:own', attributes: '*' },
|
||||
],
|
||||
},
|
||||
my_servers: {
|
||||
extends: 'guest',
|
||||
permissions: [
|
||||
{ resource: 'array', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'customizations', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'dashboard', action: 'read:any', attributes: '*' },
|
||||
{
|
||||
resource: 'docker/container',
|
||||
action: 'read:any',
|
||||
attributes: '*',
|
||||
},
|
||||
{ resource: 'logs', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'docker/network', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'notifications', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vms', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'vms/domain', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'unraid-version', action: 'read:any', attributes: '*' },
|
||||
],
|
||||
},
|
||||
notifier: {
|
||||
extends: 'guest',
|
||||
permissions: [
|
||||
{
|
||||
resource: 'notifications',
|
||||
action: 'create:own',
|
||||
attributes: '*',
|
||||
},
|
||||
],
|
||||
},
|
||||
guest: {
|
||||
permissions: [
|
||||
{ resource: 'me', action: 'read:any', attributes: '*' },
|
||||
{ resource: 'welcome', action: 'read:any', attributes: '*' },
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export const setupPermissions = (): RolesBuilder => {
|
||||
// First create an array of permissions that will be used as the base permission set for the app
|
||||
const grantList = Object.entries(roles).reduce<Array<Permission>>(
|
||||
(acc, [roleName, role]) => {
|
||||
if (role.permissions) {
|
||||
role.permissions.forEach((permission) => {
|
||||
acc.push({
|
||||
...permission,
|
||||
role: roleName,
|
||||
});
|
||||
});
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const ac = new RolesBuilder(grantList);
|
||||
|
||||
// Next, extend roles
|
||||
Object.entries(roles).forEach(([roleName, role]) => {
|
||||
if (role.extends) {
|
||||
ac.extendRole(roleName, role.extends);
|
||||
}
|
||||
});
|
||||
|
||||
apiLogger.debug('Possible Roles: %o', ac.getRoles());
|
||||
|
||||
return ac;
|
||||
};
|
||||
|
||||
export const ac = null;
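Because RolesBuilder extends accesscontrol's AccessControl, the grant list assembled in setupPermissions can be queried per role and resource. A hedged sketch using the roles defined above (the import path is assumed):

    import { setupPermissions } from '@app/core/permissions'; // path assumed

    const roles = setupPermissions();

    // 'admin' is granted read:any on 'disk' above, so this is allowed.
    const permission = roles.can('admin').readAny('disk');
    console.log(permission.granted);    // true
    console.log(permission.attributes); // ['*']

    // 'notifier' only has create:own on 'notifications', so this is denied.
    console.log(roles.can('notifier').readAny('disk').granted); // false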
|
||||
|
||||
@@ -6,12 +6,23 @@ const eventEmitter = new EventEmitter();
|
||||
eventEmitter.setMaxListeners(30);
|
||||
|
||||
export enum PUBSUB_CHANNEL {
|
||||
ARRAY = 'ARRAY',
|
||||
DASHBOARD = 'DASHBOARD',
|
||||
DISPLAY = 'DISPLAY',
|
||||
INFO = 'INFO',
|
||||
NOTIFICATION = 'NOTIFICATION',
|
||||
OWNER = 'OWNER',
|
||||
SERVERS = 'SERVERS',
|
||||
|
||||
VMS = 'VMS',
|
||||
REGISTRATION = 'REGISTRATION',
|
||||
}
|
||||
|
||||
export const pubsub = new PubSub({ eventEmitter });
|
||||
|
||||
/**
|
||||
* Create a pubsub subscription.
|
||||
* @param channel The pubsub channel to subscribe to.
|
||||
*/
|
||||
export const createSubscription = (channel: PUBSUB_CHANNEL) => {
|
||||
return pubsub.asyncIterator(channel);
|
||||
};
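For reference, a sketch of how a subscription resolver can pair createSubscription with pubsub.publish; the resolver shape and payload are illustrative, and the import path is assumed:

    import { createSubscription, pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub';

    // Each subscriber gets an async iterator bound to the ARRAY channel.
    export const Subscription = {
        array: {
            subscribe: () => createSubscription(PUBSUB_CHANNEL.ARRAY),
        },
    };

    // Publishing to the same channel pushes the payload to every open subscription.
    await pubsub.publish(PUBSUB_CHANNEL.ARRAY, { array: { state: 'STARTED' } });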
|
||||
|
||||
10
api/src/core/utils/index.ts
Normal file
10
api/src/core/utils/index.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
// Created from 'create-ts-index'
|
||||
|
||||
export * from './array';
|
||||
export * from './authentication';
|
||||
export * from './clients';
|
||||
export * from './plugins';
|
||||
export * from './shares';
|
||||
export * from './validation';
|
||||
export * from './vms';
|
||||
export * from './casting';
|
||||
@@ -15,9 +15,7 @@ export const loadState = <T extends Record<string, unknown>>(filePath: string):
|
||||
deep: true,
|
||||
}) as T;
|
||||
|
||||
logger.addContext('config', config);
|
||||
logger.trace('"%s" was loaded', filePath);
|
||||
logger.removeContext('config');
|
||||
logger.trace({ config }, '"%s" was loaded', filePath);
|
||||
|
||||
return config;
|
||||
} catch (error: unknown) {
|
||||
|
||||
35
api/src/core/utils/misc/send-form-to-keyserver.ts
Normal file
35
api/src/core/utils/misc/send-form-to-keyserver.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { AppError } from '@app/core/errors/app-error';
|
||||
import { logger } from '@app/core/log';
|
||||
import { type CancelableRequest, got, type Response } from 'got';
|
||||
|
||||
export const sendFormToKeyServer = async (url: string, data: Record<string, unknown>): Promise<CancelableRequest<Response<string>>> => {
|
||||
if (!data) {
|
||||
throw new AppError('Missing data field.');
|
||||
}
|
||||
|
||||
// Create form
|
||||
const form = new URLSearchParams();
|
||||
Object.entries(data).forEach(([key, value]) => {
|
||||
if (value !== undefined) {
|
||||
form.append(key, String(value));
|
||||
}
|
||||
});
|
||||
|
||||
// Convert form to string
|
||||
const body = form.toString();
|
||||
|
||||
logger.trace({ form: body }, 'Sending form to key-server');
|
||||
|
||||
// Send form
|
||||
return got(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
timeout: {
|
||||
request: 5_000,
|
||||
},
|
||||
throwHttpErrors: true,
|
||||
body,
|
||||
});
|
||||
};
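A minimal usage sketch, modelled on the commented-out key-server calls earlier in this commit; the endpoint is the trial URL referenced there, and the form fields plus placeholder GUID are assumptions:

    import { sendFormToKeyServer } from '@app/core/utils/misc/send-form-to-keyserver';

    // POSTs the fields as application/x-www-form-urlencoded with a 5 second timeout.
    const response = await sendFormToKeyServer(
        'https://keys.lime-technology.com/account/trial',
        { guid: 'xxxx-xxxx-xxxx-xxxxxxxxxxxx', timestamp: Date.now() }
    );
    console.log(response.statusCode, response.body);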
|
||||
@@ -14,4 +14,7 @@ export const GRAPHQL_INTROSPECTION = Boolean(
|
||||
export const PORT = process.env.PORT ?? '/var/run/unraid-api.sock';
|
||||
export const DRY_RUN = process.env.DRY_RUN === 'true';
|
||||
export const BYPASS_PERMISSION_CHECKS = process.env.BYPASS_PERMISSION_CHECKS === 'true';
|
||||
export const LOG_CORS = process.env.LOG_CORS === 'true';
|
||||
export const LOG_TYPE = process.env.LOG_TYPE as 'pretty' | 'raw';
|
||||
export const LOG_LEVEL = process.env.LOG_LEVEL as 'TRACE' | 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' | 'FATAL';
|
||||
export const LOG_TRANSPORT = process.env.LOG_TRANSPORT as 'file' | 'stdout';
|
||||
@@ -1,52 +0,0 @@
|
||||
import { report } from '@app/cli/commands/report';
|
||||
import { logger } from '@app/core/log';
|
||||
import { apiKeyToUser } from '@app/graphql/index';
|
||||
import { getters } from '@app/store/index';
|
||||
import { execa } from 'execa';
|
||||
import { type Response, type Request } from 'express';
|
||||
import { stat, writeFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
const saveApiReport = async (pathToReport: string) => {
|
||||
try {
|
||||
const apiReport = await report('-vv', '--json');
|
||||
logger.debug('Report object %o', apiReport);
|
||||
await writeFile(
|
||||
pathToReport,
|
||||
JSON.stringify(apiReport, null, 2),
|
||||
'utf-8'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.warn('Could not generate report for zip with error %o', error);
|
||||
}
|
||||
};
|
||||
|
||||
export const getLogs = async (req: Request, res: Response) => {
|
||||
const apiKey = req.headers['x-api-key'] || req.query.apiKey;
|
||||
const logPath = getters.paths()['log-base'];
|
||||
try {
|
||||
await saveApiReport(join(logPath, 'report.json'));
|
||||
} catch (error) {
|
||||
logger.warn('Could not generate report for zip with error %o', error);
|
||||
}
|
||||
const zipToWrite = join(logPath, '../unraid-api.tar.gz');
|
||||
if (
|
||||
apiKey &&
|
||||
typeof apiKey === 'string' &&
|
||||
(await apiKeyToUser(apiKey)).role !== 'guest'
|
||||
) {
|
||||
const exists = Boolean(await stat(logPath).catch(() => null));
|
||||
if (exists) {
|
||||
try {
|
||||
await execa('tar', ['-czf', zipToWrite, logPath]);
|
||||
return res.status(200).sendFile(zipToWrite);
|
||||
} catch (error) {
|
||||
return res.status(503).send(`Failed: ${error}`);
|
||||
}
|
||||
} else {
|
||||
return res.status(404).send('No Logs Available');
|
||||
}
|
||||
}
|
||||
|
||||
return res.status(403).send('unauthorized');
|
||||
};
|
||||
@@ -1,94 +0,0 @@
|
||||
import get from 'lodash/get';
|
||||
import * as core from '@app/core';
|
||||
import { graphqlLogger } from '@app/core/log';
|
||||
import { mapSchema, getDirective, MapperKind } from '@graphql-tools/utils';
|
||||
import { getCoreModule } from '@app/graphql/index';
|
||||
import type { GraphQLFieldResolver, GraphQLSchema } from 'graphql';
|
||||
import type { User } from '@app/core/types/states/user';
|
||||
|
||||
interface FuncDirective {
|
||||
module: string;
|
||||
data: object;
|
||||
query: any;
|
||||
extractFromResponse: string;
|
||||
}
|
||||
|
||||
const funcDirectiveResolver: (directiveArgs: FuncDirective) => GraphQLFieldResolver<undefined, { user?: User }, { result?: any }> | undefined = ({
|
||||
module: coreModule,
|
||||
data,
|
||||
query,
|
||||
extractFromResponse,
|
||||
}) => async (_, args, context) => {
|
||||
const func = getCoreModule(coreModule);
|
||||
|
||||
const functionContext = {
|
||||
query,
|
||||
data,
|
||||
user: context.user,
|
||||
};
|
||||
|
||||
// Run function
|
||||
const [error, coreMethodResult] = await Promise.resolve(func(functionContext, core))
|
||||
.then(result => [undefined, result])
|
||||
.catch(error_ => {
|
||||
// Ensure we aren't leaking anything in production
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
graphqlLogger.error('Module:', coreModule, 'Error:', error_.message);
|
||||
return [new Error(error_.message)];
|
||||
}
|
||||
|
||||
return [error_];
|
||||
});
|
||||
|
||||
// Bail if we can't get the method to run
|
||||
if (error) {
|
||||
return error;
|
||||
}
|
||||
|
||||
// Get wanted result type or fallback to json
|
||||
const result = coreMethodResult[args.result || 'json'];
|
||||
|
||||
// Allow fields to be extracted
|
||||
if (extractFromResponse) {
|
||||
return get(result, extractFromResponse);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the func directive - this is used to resolve @func directives in the graphql schema
|
||||
* @returns Type definition and schema interceptor to create resolvers for @func directives
|
||||
*/
|
||||
export function getFuncDirective() {
|
||||
const directiveName = 'func';
|
||||
return {
|
||||
funcDirectiveTypeDefs: /* GraphQL */`
|
||||
directive @func(
|
||||
module: String!
|
||||
data: JSON
|
||||
query: JSON
|
||||
result: String
|
||||
extractFromResponse: String
|
||||
) on FIELD_DEFINITION
|
||||
`,
|
||||
funcDirectiveTransformer: (schema: GraphQLSchema): GraphQLSchema => mapSchema(schema, {
|
||||
[MapperKind.MUTATION_ROOT_FIELD](fieldConfig) {
|
||||
const funcDirective = getDirective(schema, fieldConfig, directiveName)?.[0] as FuncDirective | undefined;
|
||||
if (funcDirective?.module) {
|
||||
fieldConfig.resolve = funcDirectiveResolver(funcDirective);
|
||||
}
|
||||
|
||||
return fieldConfig;
|
||||
},
|
||||
[MapperKind.QUERY_ROOT_FIELD](fieldConfig) {
|
||||
const funcDirective = getDirective(schema, fieldConfig, directiveName)?.[0] as FuncDirective | undefined;
|
||||
if (funcDirective?.module) {
|
||||
fieldConfig.resolve = funcDirectiveResolver(funcDirective);
|
||||
}
|
||||
|
||||
return fieldConfig;
|
||||
},
|
||||
}),
|
||||
};
|
||||
}
|
||||
@@ -2,7 +2,7 @@
|
||||
import * as Types from '@app/graphql/generated/api/types';
|
||||
|
||||
import { z } from 'zod'
|
||||
import { AllowedOriginInput, ApiKey, ApiKeyResponse, ArrayType, ArrayCapacity, ArrayDisk, ArrayDiskFsColor, ArrayDiskStatus, ArrayDiskType, ArrayPendingState, ArrayState, Baseboard, Capacity, Case, Cloud, CloudResponse, Config, ConfigErrorState, ConnectSignInInput, ConnectUserInfoInput, ContainerHostConfig, ContainerMount, ContainerPort, ContainerPortType, ContainerState, Device, Devices, Disk, DiskFsType, DiskInterfaceType, DiskPartition, DiskSmartStatus, Display, DockerContainer, DockerNetwork, Flash, Gpu, Importance, Info, InfoApps, InfoCpu, InfoMemory, KeyFile, Me, MemoryFormFactor, MemoryLayout, MemoryType, MinigraphStatus, MinigraphqlResponse, Mount, Network, Notification, NotificationFilter, NotificationInput, NotificationType, Os, Owner, ParityCheck, Partition, Pci, Permissions, ProfileModel, Registration, RegistrationState, RelayResponse, Scope, Server, ServerStatus, Service, SetupRemoteAccessInput, Share, System, Temperature, Theme, TwoFactorLocal, TwoFactorRemote, TwoFactorWithToken, TwoFactorWithoutToken, UnassignedDevice, Uptime, Usb, User, Vars, Versions, VmDomain, VmNetwork, VmState, Vms, WAN_ACCESS_TYPE, WAN_FORWARD_TYPE, Welcome, addApiKeyInput, addScopeInput, addScopeToApiKeyInput, addUserInput, arrayDiskInput, authenticateInput, deleteUserInput, mdState, registrationType, updateApikeyInput, usersInput } from '@app/graphql/generated/api/types'
|
||||
import { AllowedOriginInput, ApiKey, ApiKeyResponse, ArrayType, ArrayCapacity, ArrayDisk, ArrayDiskFsColor, ArrayDiskStatus, ArrayDiskType, ArrayPendingState, ArrayState, Baseboard, Capacity, Case, Cloud, CloudResponse, Config, ConfigErrorState, ConnectSignInInput, ConnectUserInfoInput, ContainerHostConfig, ContainerMount, ContainerPort, ContainerPortType, ContainerState, Devices, Disk, DiskFsType, DiskInterfaceType, DiskPartition, DiskSmartStatus, Display, DockerContainer, DockerNetwork, Flash, Gpu, Importance, Info, InfoApps, InfoCpu, InfoMemory, KeyFile, Me, MemoryFormFactor, MemoryLayout, MemoryType, MinigraphStatus, MinigraphqlResponse, Mount, Network, Notification, NotificationFilter, NotificationInput, NotificationType, Os, Owner, ParityCheck, Partition, Pci, ProfileModel, Registration, RegistrationState, RelayResponse, Server, ServerStatus, Service, SetupRemoteAccessInput, Share, System, Temperature, Theme, UnassignedDevice, Uptime, Usb, User, Vars, Versions, VmDomain, VmState, Vms, WAN_ACCESS_TYPE, WAN_FORWARD_TYPE, Welcome, addApiKeyInput, addUserInput, arrayDiskInput, authenticateInput, deleteUserInput, mdState, registrationType, updateApikeyInput, usersInput } from '@app/graphql/generated/api/types'
|
||||
import { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core';
|
||||
|
||||
type Properties<T> = Required<{
|
||||
@@ -248,17 +248,6 @@ export function ContainerPortSchema(): z.ZodObject<Properties<ContainerPort>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function DeviceSchema(): z.ZodObject<Properties<Device>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Device').optional(),
|
||||
device: z.string().nullish(),
|
||||
id: z.string(),
|
||||
sectorSize: z.string().nullish(),
|
||||
sectors: z.string().nullish(),
|
||||
tag: z.string().nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function DevicesSchema(): z.ZodObject<Properties<Devices>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Devices').optional(),
|
||||
@@ -471,7 +460,7 @@ export function MeSchema(): z.ZodObject<Properties<Me>> {
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
permissions: definedNonNullAnySchema.nullish(),
|
||||
role: z.string()
|
||||
roles: z.string()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -682,14 +671,6 @@ export function PciSchema(): z.ZodObject<Properties<Pci>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function PermissionsSchema(): z.ZodObject<Properties<Permissions>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Permissions').optional(),
|
||||
grants: definedNonNullAnySchema.nullish(),
|
||||
scopes: definedNonNullAnySchema.nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function ProfileModelSchema(): z.ZodObject<Properties<ProfileModel>> {
|
||||
return z.object({
|
||||
__typename: z.literal('ProfileModel').optional(),
|
||||
@@ -720,14 +701,6 @@ export function RelayResponseSchema(): z.ZodObject<Properties<RelayResponse>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function ScopeSchema(): z.ZodObject<Properties<Scope>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Scope').optional(),
|
||||
description: z.string().nullish(),
|
||||
name: z.string().nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function ServerSchema(): z.ZodObject<Properties<Server>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Server').optional(),
|
||||
@@ -794,37 +767,6 @@ export function SystemSchema(): z.ZodObject<Properties<System>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function TwoFactorLocalSchema(): z.ZodObject<Properties<TwoFactorLocal>> {
|
||||
return z.object({
|
||||
__typename: z.literal('TwoFactorLocal').optional(),
|
||||
enabled: z.boolean().nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function TwoFactorRemoteSchema(): z.ZodObject<Properties<TwoFactorRemote>> {
|
||||
return z.object({
|
||||
__typename: z.literal('TwoFactorRemote').optional(),
|
||||
enabled: z.boolean().nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function TwoFactorWithTokenSchema(): z.ZodObject<Properties<TwoFactorWithToken>> {
|
||||
return z.object({
|
||||
__typename: z.literal('TwoFactorWithToken').optional(),
|
||||
local: TwoFactorLocalSchema().nullish(),
|
||||
remote: TwoFactorRemoteSchema().nullish(),
|
||||
token: z.string().nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function TwoFactorWithoutTokenSchema(): z.ZodObject<Properties<TwoFactorWithoutToken>> {
|
||||
return z.object({
|
||||
__typename: z.literal('TwoFactorWithoutToken').optional(),
|
||||
local: TwoFactorLocalSchema().nullish(),
|
||||
remote: TwoFactorRemoteSchema().nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function UnassignedDeviceSchema(): z.ZodObject<Properties<UnassignedDevice>> {
|
||||
return z.object({
|
||||
__typename: z.literal('UnassignedDevice').optional(),
|
||||
@@ -905,7 +847,7 @@ export function UserSchema(): z.ZodObject<Properties<User>> {
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
password: z.boolean().nullish(),
|
||||
role: z.string()
|
||||
roles: z.string()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1098,13 +1040,6 @@ export function VmDomainSchema(): z.ZodObject<Properties<VmDomain>> {
|
||||
})
|
||||
}
|
||||
|
||||
export function VmNetworkSchema(): z.ZodObject<Properties<VmNetwork>> {
|
||||
return z.object({
|
||||
__typename: z.literal('VmNetwork').optional(),
|
||||
_placeholderType: z.string().nullish()
|
||||
})
|
||||
}
|
||||
|
||||
export function VmsSchema(): z.ZodObject<Properties<Vms>> {
|
||||
return z.object({
|
||||
__typename: z.literal('Vms').optional(),
|
||||
@@ -1127,20 +1062,6 @@ export function addApiKeyInputSchema(): z.ZodObject<Properties<addApiKeyInput>>
|
||||
})
|
||||
}
|
||||
|
||||
export function addScopeInputSchema(): z.ZodObject<Properties<addScopeInput>> {
|
||||
return z.object({
|
||||
description: z.string().nullish(),
|
||||
name: z.string()
|
||||
})
|
||||
}
|
||||
|
||||
export function addScopeToApiKeyInputSchema(): z.ZodObject<Properties<addScopeToApiKeyInput>> {
|
||||
return z.object({
|
||||
apiKey: z.string(),
|
||||
name: z.string()
|
||||
})
|
||||
}
|
||||
|
||||
export function addUserInputSchema(): z.ZodObject<Properties<addUserInput>> {
|
||||
return z.object({
|
||||
description: z.string().nullish(),
|
||||
|
||||
@@ -294,15 +294,6 @@ export enum ContainerState {
|
||||
RUNNING = 'RUNNING'
|
||||
}
|
||||
|
||||
export type Device = {
|
||||
__typename?: 'Device';
|
||||
device?: Maybe<Scalars['String']['output']>;
|
||||
id: Scalars['ID']['output'];
|
||||
sectorSize?: Maybe<Scalars['String']['output']>;
|
||||
sectors?: Maybe<Scalars['String']['output']>;
|
||||
tag?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Devices = {
|
||||
__typename?: 'Devices';
|
||||
gpu?: Maybe<Array<Maybe<Gpu>>>;
|
||||
@@ -519,7 +510,7 @@ export type Me = UserAccount & {
|
||||
id: Scalars['ID']['output'];
|
||||
name: Scalars['String']['output'];
|
||||
permissions?: Maybe<Scalars['JSON']['output']>;
|
||||
role: Scalars['String']['output'];
|
||||
roles: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export enum MemoryFormFactor {
|
||||
@@ -576,10 +567,6 @@ export type Mutation = {
|
||||
addApikey?: Maybe<ApiKey>;
|
||||
/** Add new disk to array */
|
||||
addDiskToArray?: Maybe<ArrayType>;
|
||||
/** Add a new permission scope */
|
||||
addScope?: Maybe<Scope>;
|
||||
/** Add a new permission scope to apiKey */
|
||||
addScopeToApiKey?: Maybe<Scope>;
|
||||
/** Add a new user */
|
||||
addUser?: Maybe<User>;
|
||||
/** Cancel parity check */
|
||||
@@ -627,16 +614,6 @@ export type MutationaddDiskToArrayArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type MutationaddScopeArgs = {
|
||||
input: addScopeInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationaddScopeToApiKeyArgs = {
|
||||
input: addScopeToApiKeyInput;
|
||||
};
|
||||
|
||||
|
||||
export type MutationaddUserArgs = {
|
||||
input: addUserInput;
|
||||
};
|
||||
@@ -869,12 +846,6 @@ export type Pci = {
|
||||
vendorname?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Permissions = {
|
||||
__typename?: 'Permissions';
|
||||
grants?: Maybe<Scalars['JSON']['output']>;
|
||||
scopes?: Maybe<Scalars['JSON']['output']>;
|
||||
};
|
||||
|
||||
export type ProfileModel = {
|
||||
__typename?: 'ProfileModel';
|
||||
avatar?: Maybe<Scalars['String']['output']>;
|
||||
@@ -891,9 +862,6 @@ export type Query = {
|
||||
array: ArrayType;
|
||||
cloud?: Maybe<Cloud>;
|
||||
config: Config;
|
||||
crashReportingEnabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
device?: Maybe<Device>;
|
||||
devices: Array<Maybe<Device>>;
|
||||
/** Single disk */
|
||||
disk?: Maybe<Disk>;
|
||||
/** Multiple disks */
|
||||
@@ -913,29 +881,19 @@ export type Query = {
|
||||
online?: Maybe<Scalars['Boolean']['output']>;
|
||||
owner?: Maybe<Owner>;
|
||||
parityHistory?: Maybe<Array<Maybe<ParityCheck>>>;
|
||||
permissions?: Maybe<Permissions>;
|
||||
registration?: Maybe<Registration>;
|
||||
server?: Maybe<Server>;
|
||||
servers: Array<Server>;
|
||||
/** Network Shares */
|
||||
shares?: Maybe<Array<Maybe<Share>>>;
|
||||
twoFactor?: Maybe<TwoFactorWithToken>;
|
||||
unassignedDevices?: Maybe<Array<Maybe<UnassignedDevice>>>;
|
||||
/** User account */
|
||||
user?: Maybe<User>;
|
||||
/** User accounts */
|
||||
users: Array<User>;
|
||||
vars?: Maybe<Vars>;
|
||||
/** Virtual network for vms */
|
||||
vmNetwork?: Maybe<Scalars['JSON']['output']>;
|
||||
/** Virtual machines */
|
||||
vms?: Maybe<Vms>;
|
||||
welcome?: Maybe<Welcome>;
|
||||
};
|
||||
|
||||
|
||||
export type QuerydeviceArgs = {
|
||||
id: Scalars['ID']['input'];
|
||||
};
|
||||
|
||||
|
||||
@@ -964,11 +922,6 @@ export type QuerynotificationsArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type QueryserverArgs = {
|
||||
name: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
|
||||
export type QueryuserArgs = {
|
||||
id: Scalars['ID']['input'];
|
||||
};
|
||||
@@ -978,11 +931,6 @@ export type QueryusersArgs = {
|
||||
input?: InputMaybe<usersInput>;
|
||||
};
|
||||
|
||||
|
||||
export type QueryvmNetworkArgs = {
|
||||
name: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type Registration = {
|
||||
__typename?: 'Registration';
|
||||
expiration?: Maybe<Scalars['String']['output']>;
|
||||
@@ -1040,15 +988,6 @@ export type RelayResponse = {
|
||||
timeout?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
/** A permission scope */
|
||||
export type Scope = {
|
||||
__typename?: 'Scope';
|
||||
/** A user friendly description */
|
||||
description?: Maybe<Scalars['String']['output']>;
|
||||
/** A unique name for the scope */
|
||||
name?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type Server = {
|
||||
__typename?: 'Server';
|
||||
apikey: Scalars['String']['output'];
|
||||
@@ -1114,9 +1053,6 @@ export type Subscription = {
|
||||
apikeys?: Maybe<Array<Maybe<ApiKey>>>;
|
||||
array: ArrayType;
|
||||
config: Config;
|
||||
crashReportingEnabled: Scalars['Boolean']['output'];
|
||||
device: Device;
|
||||
devices?: Maybe<Array<Device>>;
|
||||
display?: Maybe<Display>;
|
||||
dockerContainer: DockerContainer;
|
||||
dockerContainers?: Maybe<Array<Maybe<DockerContainer>>>;
|
||||
@@ -1131,26 +1067,18 @@ export type Subscription = {
|
||||
parityHistory: ParityCheck;
|
||||
ping: Scalars['String']['output'];
|
||||
registration: Registration;
|
||||
server: Server;
|
||||
servers?: Maybe<Array<Server>>;
|
||||
server?: Maybe<Server>;
|
||||
service?: Maybe<Array<Service>>;
|
||||
share: Share;
|
||||
shares?: Maybe<Array<Share>>;
|
||||
twoFactor?: Maybe<TwoFactorWithoutToken>;
|
||||
unassignedDevices?: Maybe<Array<UnassignedDevice>>;
|
||||
user: User;
|
||||
users: Array<Maybe<User>>;
|
||||
vars: Vars;
|
||||
vmNetworks?: Maybe<Array<VmNetwork>>;
|
||||
vms?: Maybe<Vms>;
|
||||
};
|
||||
|
||||
|
||||
export type SubscriptiondeviceArgs = {
|
||||
id: Scalars['ID']['input'];
|
||||
};
|
||||
|
||||
|
||||
export type SubscriptiondockerContainerArgs = {
|
||||
id: Scalars['ID']['input'];
|
||||
};
|
||||
@@ -1161,11 +1089,6 @@ export type SubscriptiondockerNetworkArgs = {
|
||||
};
|
||||
|
||||
|
||||
export type SubscriptionserverArgs = {
|
||||
name: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
|
||||
export type SubscriptionserviceArgs = {
|
||||
name: Scalars['String']['input'];
|
||||
};
|
||||
@@ -1199,29 +1122,6 @@ export enum Theme {
|
||||
WHITE = 'white'
|
||||
}
|
||||
|
||||
export type TwoFactorLocal = {
|
||||
__typename?: 'TwoFactorLocal';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type TwoFactorRemote = {
|
||||
__typename?: 'TwoFactorRemote';
|
||||
enabled?: Maybe<Scalars['Boolean']['output']>;
|
||||
};
|
||||
|
||||
export type TwoFactorWithToken = {
|
||||
__typename?: 'TwoFactorWithToken';
|
||||
local?: Maybe<TwoFactorLocal>;
|
||||
remote?: Maybe<TwoFactorRemote>;
|
||||
token?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type TwoFactorWithoutToken = {
|
||||
__typename?: 'TwoFactorWithoutToken';
|
||||
local?: Maybe<TwoFactorLocal>;
|
||||
remote?: Maybe<TwoFactorRemote>;
|
||||
};
|
||||
|
||||
export type UnassignedDevice = {
|
||||
__typename?: 'UnassignedDevice';
|
||||
devlinks?: Maybe<Scalars['String']['output']>;
|
||||
@@ -1298,14 +1198,14 @@ export type User = UserAccount & {
|
||||
name: Scalars['String']['output'];
|
||||
/** If the account has a password set */
|
||||
password?: Maybe<Scalars['Boolean']['output']>;
|
||||
role: Scalars['String']['output'];
|
||||
roles: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type UserAccount = {
|
||||
description: Scalars['String']['output'];
|
||||
id: Scalars['ID']['output'];
|
||||
name: Scalars['String']['output'];
|
||||
role: Scalars['String']['output'];
|
||||
roles: Scalars['String']['output'];
|
||||
};
|
||||
|
||||
export type Vars = {
|
||||
@@ -1511,11 +1411,6 @@ export type VmDomain = {
|
||||
uuid: Scalars['ID']['output'];
|
||||
};
|
||||
|
||||
export type VmNetwork = {
|
||||
__typename?: 'VmNetwork';
|
||||
_placeholderType?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export enum VmState {
|
||||
CRASHED = 'CRASHED',
|
||||
IDLE = 'IDLE',
|
||||
@@ -1554,19 +1449,6 @@ export type addApiKeyInput = {
|
||||
userId?: InputMaybe<Scalars['String']['input']>;
|
||||
};
|
||||
|
||||
export type addScopeInput = {
|
||||
/** Scope description */
|
||||
description?: InputMaybe<Scalars['String']['input']>;
|
||||
/** Scope name */
|
||||
name: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type addScopeToApiKeyInput = {
|
||||
apiKey: Scalars['String']['input'];
|
||||
/** Scope name */
|
||||
name: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type addUserInput = {
|
||||
description?: InputMaybe<Scalars['String']['input']>;
|
||||
name: Scalars['String']['input'];
|
||||
@@ -1716,7 +1598,6 @@ export type ResolversTypes = ResolversObject<{
|
||||
ContainerPortType: ContainerPortType;
|
||||
ContainerState: ContainerState;
|
||||
DateTime: ResolverTypeWrapper<Scalars['DateTime']['output']>;
|
||||
Device: ResolverTypeWrapper<Device>;
|
||||
Devices: ResolverTypeWrapper<Devices>;
|
||||
Disk: ResolverTypeWrapper<Disk>;
|
||||
DiskFsType: DiskFsType;
|
||||
@@ -1757,14 +1638,12 @@ export type ResolversTypes = ResolversObject<{
|
||||
ParityCheck: ResolverTypeWrapper<ParityCheck>;
|
||||
Partition: ResolverTypeWrapper<Partition>;
|
||||
Pci: ResolverTypeWrapper<Pci>;
|
||||
Permissions: ResolverTypeWrapper<Permissions>;
|
||||
Port: ResolverTypeWrapper<Scalars['Port']['output']>;
|
||||
ProfileModel: ResolverTypeWrapper<ProfileModel>;
|
||||
Query: ResolverTypeWrapper<{}>;
|
||||
Registration: ResolverTypeWrapper<Registration>;
|
||||
RegistrationState: RegistrationState;
|
||||
RelayResponse: ResolverTypeWrapper<RelayResponse>;
|
||||
Scope: ResolverTypeWrapper<Scope>;
|
||||
Server: ResolverTypeWrapper<Server>;
|
||||
ServerStatus: ServerStatus;
|
||||
Service: ResolverTypeWrapper<Service>;
|
||||
@@ -1775,10 +1654,6 @@ export type ResolversTypes = ResolversObject<{
|
||||
System: ResolverTypeWrapper<System>;
|
||||
Temperature: Temperature;
|
||||
Theme: Theme;
|
||||
TwoFactorLocal: ResolverTypeWrapper<TwoFactorLocal>;
|
||||
TwoFactorRemote: ResolverTypeWrapper<TwoFactorRemote>;
|
||||
TwoFactorWithToken: ResolverTypeWrapper<TwoFactorWithToken>;
|
||||
TwoFactorWithoutToken: ResolverTypeWrapper<TwoFactorWithoutToken>;
|
||||
UUID: ResolverTypeWrapper<Scalars['UUID']['output']>;
|
||||
UnassignedDevice: ResolverTypeWrapper<UnassignedDevice>;
|
||||
Uptime: ResolverTypeWrapper<Uptime>;
|
||||
@@ -1788,15 +1663,12 @@ export type ResolversTypes = ResolversObject<{
|
||||
Vars: ResolverTypeWrapper<Vars>;
|
||||
Versions: ResolverTypeWrapper<Versions>;
|
||||
VmDomain: ResolverTypeWrapper<VmDomain>;
|
||||
VmNetwork: ResolverTypeWrapper<VmNetwork>;
|
||||
VmState: VmState;
|
||||
Vms: ResolverTypeWrapper<Vms>;
|
||||
WAN_ACCESS_TYPE: WAN_ACCESS_TYPE;
|
||||
WAN_FORWARD_TYPE: WAN_FORWARD_TYPE;
|
||||
Welcome: ResolverTypeWrapper<Welcome>;
|
||||
addApiKeyInput: addApiKeyInput;
|
||||
addScopeInput: addScopeInput;
|
||||
addScopeToApiKeyInput: addScopeToApiKeyInput;
|
||||
addUserInput: addUserInput;
|
||||
arrayDiskInput: arrayDiskInput;
|
||||
authenticateInput: authenticateInput;
|
||||
@@ -1828,7 +1700,6 @@ export type ResolversParentTypes = ResolversObject<{
|
||||
ContainerMount: ContainerMount;
|
||||
ContainerPort: ContainerPort;
|
||||
DateTime: Scalars['DateTime']['output'];
|
||||
Device: Device;
|
||||
Devices: Devices;
|
||||
Disk: Disk;
|
||||
DiskPartition: DiskPartition;
|
||||
@@ -1861,13 +1732,11 @@ export type ResolversParentTypes = ResolversObject<{
|
||||
ParityCheck: ParityCheck;
|
||||
Partition: Partition;
|
||||
Pci: Pci;
|
||||
Permissions: Permissions;
|
||||
Port: Scalars['Port']['output'];
|
||||
ProfileModel: ProfileModel;
|
||||
Query: {};
|
||||
Registration: Registration;
|
||||
RelayResponse: RelayResponse;
|
||||
Scope: Scope;
|
||||
Server: Server;
|
||||
Service: Service;
|
||||
SetupRemoteAccessInput: SetupRemoteAccessInput;
|
||||
@@ -1875,10 +1744,6 @@ export type ResolversParentTypes = ResolversObject<{
|
||||
String: Scalars['String']['output'];
|
||||
Subscription: {};
|
||||
System: System;
|
||||
TwoFactorLocal: TwoFactorLocal;
|
||||
TwoFactorRemote: TwoFactorRemote;
|
||||
TwoFactorWithToken: TwoFactorWithToken;
|
||||
TwoFactorWithoutToken: TwoFactorWithoutToken;
|
||||
UUID: Scalars['UUID']['output'];
|
||||
UnassignedDevice: UnassignedDevice;
|
||||
Uptime: Uptime;
|
||||
@@ -1888,12 +1753,9 @@ export type ResolversParentTypes = ResolversObject<{
|
||||
Vars: Vars;
|
||||
Versions: Versions;
|
||||
VmDomain: VmDomain;
|
||||
VmNetwork: VmNetwork;
|
||||
Vms: Vms;
|
||||
Welcome: Welcome;
|
||||
addApiKeyInput: addApiKeyInput;
|
||||
addScopeInput: addScopeInput;
|
||||
addScopeToApiKeyInput: addScopeToApiKeyInput;
|
||||
addUserInput: addUserInput;
|
||||
arrayDiskInput: arrayDiskInput;
|
||||
authenticateInput: authenticateInput;
|
||||
@@ -1902,12 +1764,6 @@ export type ResolversParentTypes = ResolversObject<{
|
||||
usersInput: usersInput;
|
||||
}>;
|
||||
|
||||
export type subscriptionDirectiveArgs = {
|
||||
channel: Scalars['String']['input'];
|
||||
};
|
||||
|
||||
export type subscriptionDirectiveResolver<Result, Parent, ContextType = Context, Args = subscriptionDirectiveArgs> = DirectiveResolverFn<Result, Parent, ContextType, Args>;
|
||||
|
||||
export type ApiKeyResolvers<ContextType = Context, ParentType extends ResolversParentTypes['ApiKey'] = ResolversParentTypes['ApiKey']> = ResolversObject<{
|
||||
description?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
|
||||
expiresAt?: Resolver<ResolversTypes['Long'], ParentType, ContextType>;
|
||||
@@ -2043,15 +1899,6 @@ export interface DateTimeScalarConfig extends GraphQLScalarTypeConfig<ResolversT
|
||||
name: 'DateTime';
|
||||
}
|
||||
|
||||
export type DeviceResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Device'] = ResolversParentTypes['Device']> = ResolversObject<{
|
||||
device?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
|
||||
id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
|
||||
sectorSize?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
|
||||
sectors?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
|
||||
tag?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type DevicesResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Devices'] = ResolversParentTypes['Devices']> = ResolversObject<{
|
||||
gpu?: Resolver<Maybe<Array<Maybe<ResolversTypes['Gpu']>>>, ParentType, ContextType>;
|
||||
network?: Resolver<Maybe<Array<Maybe<ResolversTypes['Network']>>>, ParentType, ContextType>;
|
||||
@@ -2244,7 +2091,7 @@ export type MeResolvers<ContextType = Context, ParentType extends ResolversParen
|
||||
id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
|
||||
name?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
permissions?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType>;
|
||||
role?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
roles?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
@@ -2281,8 +2128,6 @@ export type MountResolvers<ContextType = Context, ParentType extends ResolversPa
|
||||
export type MutationResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Mutation'] = ResolversParentTypes['Mutation']> = ResolversObject<{
|
||||
addApikey?: Resolver<Maybe<ResolversTypes['ApiKey']>, ParentType, ContextType, RequireFields<MutationaddApikeyArgs, 'name'>>;
|
||||
addDiskToArray?: Resolver<Maybe<ResolversTypes['Array']>, ParentType, ContextType, Partial<MutationaddDiskToArrayArgs>>;
|
||||
addScope?: Resolver<Maybe<ResolversTypes['Scope']>, ParentType, ContextType, RequireFields<MutationaddScopeArgs, 'input'>>;
|
||||
addScopeToApiKey?: Resolver<Maybe<ResolversTypes['Scope']>, ParentType, ContextType, RequireFields<MutationaddScopeToApiKeyArgs, 'input'>>;
|
||||
addUser?: Resolver<Maybe<ResolversTypes['User']>, ParentType, ContextType, RequireFields<MutationaddUserArgs, 'input'>>;
|
||||
cancelParityCheck?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType>;
|
||||
clearArrayDiskStatistics?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType, RequireFields<MutationclearArrayDiskStatisticsArgs, 'id'>>;
|
||||
@@ -2442,12 +2287,6 @@ export type PciResolvers<ContextType = Context, ParentType extends ResolversPare
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type PermissionsResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Permissions'] = ResolversParentTypes['Permissions']> = ResolversObject<{
|
||||
grants?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType>;
|
||||
scopes?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export interface PortScalarConfig extends GraphQLScalarTypeConfig<ResolversTypes['Port'], any> {
|
||||
name: 'Port';
|
||||
}
|
||||
@@ -2465,9 +2304,6 @@ export type QueryResolvers<ContextType = Context, ParentType extends ResolversPa
|
||||
array?: Resolver<ResolversTypes['Array'], ParentType, ContextType>;
|
||||
cloud?: Resolver<Maybe<ResolversTypes['Cloud']>, ParentType, ContextType>;
|
||||
config?: Resolver<ResolversTypes['Config'], ParentType, ContextType>;
|
||||
crashReportingEnabled?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
|
||||
device?: Resolver<Maybe<ResolversTypes['Device']>, ParentType, ContextType, RequireFields<QuerydeviceArgs, 'id'>>;
|
||||
devices?: Resolver<Array<Maybe<ResolversTypes['Device']>>, ParentType, ContextType>;
|
||||
disk?: Resolver<Maybe<ResolversTypes['Disk']>, ParentType, ContextType, RequireFields<QuerydiskArgs, 'id'>>;
|
||||
disks?: Resolver<Array<Maybe<ResolversTypes['Disk']>>, ParentType, ContextType>;
|
||||
display?: Resolver<Maybe<ResolversTypes['Display']>, ParentType, ContextType>;
|
||||
@@ -2481,19 +2317,15 @@ export type QueryResolvers<ContextType = Context, ParentType extends ResolversPa
|
||||
online?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
|
||||
owner?: Resolver<Maybe<ResolversTypes['Owner']>, ParentType, ContextType>;
|
||||
parityHistory?: Resolver<Maybe<Array<Maybe<ResolversTypes['ParityCheck']>>>, ParentType, ContextType>;
|
||||
permissions?: Resolver<Maybe<ResolversTypes['Permissions']>, ParentType, ContextType>;
|
||||
registration?: Resolver<Maybe<ResolversTypes['Registration']>, ParentType, ContextType>;
|
||||
server?: Resolver<Maybe<ResolversTypes['Server']>, ParentType, ContextType, RequireFields<QueryserverArgs, 'name'>>;
|
||||
server?: Resolver<Maybe<ResolversTypes['Server']>, ParentType, ContextType>;
|
||||
servers?: Resolver<Array<ResolversTypes['Server']>, ParentType, ContextType>;
|
||||
shares?: Resolver<Maybe<Array<Maybe<ResolversTypes['Share']>>>, ParentType, ContextType>;
|
||||
twoFactor?: Resolver<Maybe<ResolversTypes['TwoFactorWithToken']>, ParentType, ContextType>;
|
||||
unassignedDevices?: Resolver<Maybe<Array<Maybe<ResolversTypes['UnassignedDevice']>>>, ParentType, ContextType>;
|
||||
user?: Resolver<Maybe<ResolversTypes['User']>, ParentType, ContextType, RequireFields<QueryuserArgs, 'id'>>;
|
||||
users?: Resolver<Array<ResolversTypes['User']>, ParentType, ContextType, Partial<QueryusersArgs>>;
|
||||
vars?: Resolver<Maybe<ResolversTypes['Vars']>, ParentType, ContextType>;
|
||||
vmNetwork?: Resolver<Maybe<ResolversTypes['JSON']>, ParentType, ContextType, RequireFields<QueryvmNetworkArgs, 'name'>>;
|
||||
vms?: Resolver<Maybe<ResolversTypes['Vms']>, ParentType, ContextType>;
|
||||
welcome?: Resolver<Maybe<ResolversTypes['Welcome']>, ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type RegistrationResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Registration'] = ResolversParentTypes['Registration']> = ResolversObject<{
|
||||
@@ -2512,12 +2344,6 @@ export type RelayResponseResolvers<ContextType = Context, ParentType extends Res
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type ScopeResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Scope'] = ResolversParentTypes['Scope']> = ResolversObject<{
|
||||
description?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
|
||||
name?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type ServerResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Server'] = ResolversParentTypes['Server']> = ResolversObject<{
|
||||
apikey?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
guid?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
@@ -2562,9 +2388,6 @@ export type SubscriptionResolvers<ContextType = Context, ParentType extends Reso
|
||||
apikeys?: SubscriptionResolver<Maybe<Array<Maybe<ResolversTypes['ApiKey']>>>, "apikeys", ParentType, ContextType>;
|
||||
array?: SubscriptionResolver<ResolversTypes['Array'], "array", ParentType, ContextType>;
|
||||
config?: SubscriptionResolver<ResolversTypes['Config'], "config", ParentType, ContextType>;
|
||||
crashReportingEnabled?: SubscriptionResolver<ResolversTypes['Boolean'], "crashReportingEnabled", ParentType, ContextType>;
|
||||
device?: SubscriptionResolver<ResolversTypes['Device'], "device", ParentType, ContextType, RequireFields<SubscriptiondeviceArgs, 'id'>>;
|
||||
devices?: SubscriptionResolver<Maybe<Array<ResolversTypes['Device']>>, "devices", ParentType, ContextType>;
|
||||
display?: SubscriptionResolver<Maybe<ResolversTypes['Display']>, "display", ParentType, ContextType>;
|
||||
dockerContainer?: SubscriptionResolver<ResolversTypes['DockerContainer'], "dockerContainer", ParentType, ContextType, RequireFields<SubscriptiondockerContainerArgs, 'id'>>;
|
||||
dockerContainers?: SubscriptionResolver<Maybe<Array<Maybe<ResolversTypes['DockerContainer']>>>, "dockerContainers", ParentType, ContextType>;
|
||||
@@ -2579,17 +2402,14 @@ export type SubscriptionResolvers<ContextType = Context, ParentType extends Reso
|
||||
parityHistory?: SubscriptionResolver<ResolversTypes['ParityCheck'], "parityHistory", ParentType, ContextType>;
|
||||
ping?: SubscriptionResolver<ResolversTypes['String'], "ping", ParentType, ContextType>;
|
||||
registration?: SubscriptionResolver<ResolversTypes['Registration'], "registration", ParentType, ContextType>;
|
||||
server?: SubscriptionResolver<ResolversTypes['Server'], "server", ParentType, ContextType, RequireFields<SubscriptionserverArgs, 'name'>>;
|
||||
servers?: SubscriptionResolver<Maybe<Array<ResolversTypes['Server']>>, "servers", ParentType, ContextType>;
|
||||
server?: SubscriptionResolver<Maybe<ResolversTypes['Server']>, "server", ParentType, ContextType>;
|
||||
service?: SubscriptionResolver<Maybe<Array<ResolversTypes['Service']>>, "service", ParentType, ContextType, RequireFields<SubscriptionserviceArgs, 'name'>>;
|
||||
share?: SubscriptionResolver<ResolversTypes['Share'], "share", ParentType, ContextType, RequireFields<SubscriptionshareArgs, 'id'>>;
|
||||
shares?: SubscriptionResolver<Maybe<Array<ResolversTypes['Share']>>, "shares", ParentType, ContextType>;
|
||||
twoFactor?: SubscriptionResolver<Maybe<ResolversTypes['TwoFactorWithoutToken']>, "twoFactor", ParentType, ContextType>;
|
||||
unassignedDevices?: SubscriptionResolver<Maybe<Array<ResolversTypes['UnassignedDevice']>>, "unassignedDevices", ParentType, ContextType>;
|
||||
user?: SubscriptionResolver<ResolversTypes['User'], "user", ParentType, ContextType, RequireFields<SubscriptionuserArgs, 'id'>>;
|
||||
users?: SubscriptionResolver<Array<Maybe<ResolversTypes['User']>>, "users", ParentType, ContextType>;
|
||||
vars?: SubscriptionResolver<ResolversTypes['Vars'], "vars", ParentType, ContextType>;
|
||||
vmNetworks?: SubscriptionResolver<Maybe<Array<ResolversTypes['VmNetwork']>>, "vmNetworks", ParentType, ContextType>;
|
||||
vms?: SubscriptionResolver<Maybe<ResolversTypes['Vms']>, "vms", ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
@@ -2603,29 +2423,6 @@ export type SystemResolvers<ContextType = Context, ParentType extends ResolversP
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type TwoFactorLocalResolvers<ContextType = Context, ParentType extends ResolversParentTypes['TwoFactorLocal'] = ResolversParentTypes['TwoFactorLocal']> = ResolversObject<{
|
||||
enabled?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type TwoFactorRemoteResolvers<ContextType = Context, ParentType extends ResolversParentTypes['TwoFactorRemote'] = ResolversParentTypes['TwoFactorRemote']> = ResolversObject<{
|
||||
enabled?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type TwoFactorWithTokenResolvers<ContextType = Context, ParentType extends ResolversParentTypes['TwoFactorWithToken'] = ResolversParentTypes['TwoFactorWithToken']> = ResolversObject<{
|
||||
local?: Resolver<Maybe<ResolversTypes['TwoFactorLocal']>, ParentType, ContextType>;
|
||||
remote?: Resolver<Maybe<ResolversTypes['TwoFactorRemote']>, ParentType, ContextType>;
|
||||
token?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type TwoFactorWithoutTokenResolvers<ContextType = Context, ParentType extends ResolversParentTypes['TwoFactorWithoutToken'] = ResolversParentTypes['TwoFactorWithoutToken']> = ResolversObject<{
|
||||
local?: Resolver<Maybe<ResolversTypes['TwoFactorLocal']>, ParentType, ContextType>;
|
||||
remote?: Resolver<Maybe<ResolversTypes['TwoFactorRemote']>, ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export interface UUIDScalarConfig extends GraphQLScalarTypeConfig<ResolversTypes['UUID'], any> {
|
||||
name: 'UUID';
|
||||
}
|
||||
@@ -2702,7 +2499,7 @@ export type UserResolvers<ContextType = Context, ParentType extends ResolversPar
|
||||
id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
|
||||
name?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
password?: Resolver<Maybe<ResolversTypes['Boolean']>, ParentType, ContextType>;
|
||||
role?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
roles?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
@@ -2711,7 +2508,7 @@ export type UserAccountResolvers<ContextType = Context, ParentType extends Resol
|
||||
description?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
id?: Resolver<ResolversTypes['ID'], ParentType, ContextType>;
|
||||
name?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
role?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
roles?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type VarsResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Vars'] = ResolversParentTypes['Vars']> = ResolversObject<{
|
||||
@@ -2897,11 +2694,6 @@ export type VmDomainResolvers<ContextType = Context, ParentType extends Resolver
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type VmNetworkResolvers<ContextType = Context, ParentType extends ResolversParentTypes['VmNetwork'] = ResolversParentTypes['VmNetwork']> = ResolversObject<{
|
||||
_placeholderType?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
}>;
|
||||
|
||||
export type VmsResolvers<ContextType = Context, ParentType extends ResolversParentTypes['Vms'] = ResolversParentTypes['Vms']> = ResolversObject<{
|
||||
domain?: Resolver<Maybe<Array<ResolversTypes['VmDomain']>>, ParentType, ContextType>;
|
||||
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
|
||||
@@ -2928,7 +2720,6 @@ export type Resolvers<ContextType = Context> = ResolversObject<{
|
||||
ContainerMount?: ContainerMountResolvers<ContextType>;
|
||||
ContainerPort?: ContainerPortResolvers<ContextType>;
|
||||
DateTime?: GraphQLScalarType;
|
||||
Device?: DeviceResolvers<ContextType>;
|
||||
Devices?: DevicesResolvers<ContextType>;
|
||||
Disk?: DiskResolvers<ContextType>;
|
||||
DiskPartition?: DiskPartitionResolvers<ContextType>;
|
||||
@@ -2956,22 +2747,16 @@ export type Resolvers<ContextType = Context> = ResolversObject<{
|
||||
ParityCheck?: ParityCheckResolvers<ContextType>;
|
||||
Partition?: PartitionResolvers<ContextType>;
|
||||
Pci?: PciResolvers<ContextType>;
|
||||
Permissions?: PermissionsResolvers<ContextType>;
|
||||
Port?: GraphQLScalarType;
|
||||
ProfileModel?: ProfileModelResolvers<ContextType>;
|
||||
Query?: QueryResolvers<ContextType>;
|
||||
Registration?: RegistrationResolvers<ContextType>;
|
||||
RelayResponse?: RelayResponseResolvers<ContextType>;
|
||||
Scope?: ScopeResolvers<ContextType>;
|
||||
Server?: ServerResolvers<ContextType>;
|
||||
Service?: ServiceResolvers<ContextType>;
|
||||
Share?: ShareResolvers<ContextType>;
|
||||
Subscription?: SubscriptionResolvers<ContextType>;
|
||||
System?: SystemResolvers<ContextType>;
|
||||
TwoFactorLocal?: TwoFactorLocalResolvers<ContextType>;
|
||||
TwoFactorRemote?: TwoFactorRemoteResolvers<ContextType>;
|
||||
TwoFactorWithToken?: TwoFactorWithTokenResolvers<ContextType>;
|
||||
TwoFactorWithoutToken?: TwoFactorWithoutTokenResolvers<ContextType>;
|
||||
UUID?: GraphQLScalarType;
|
||||
UnassignedDevice?: UnassignedDeviceResolvers<ContextType>;
|
||||
Uptime?: UptimeResolvers<ContextType>;
|
||||
@@ -2981,11 +2766,7 @@ export type Resolvers<ContextType = Context> = ResolversObject<{
|
||||
Vars?: VarsResolvers<ContextType>;
|
||||
Versions?: VersionsResolvers<ContextType>;
|
||||
VmDomain?: VmDomainResolvers<ContextType>;
|
||||
VmNetwork?: VmNetworkResolvers<ContextType>;
|
||||
Vms?: VmsResolvers<ContextType>;
|
||||
Welcome?: WelcomeResolvers<ContextType>;
|
||||
}>;
|
||||
|
||||
export type DirectiveResolvers<ContextType = Context> = ResolversObject<{
|
||||
subscription?: subscriptionDirectiveResolver<any, any, ContextType>;
|
||||
}>;
|
||||
|
||||
@@ -345,7 +345,7 @@ export type KsServerDetails = {
  flashProduct: Scalars['String']['output'];
  flashVendor: Scalars['String']['output'];
  guid: Scalars['String']['output'];
  ipsId: Scalars['String']['output'];
  ipsId?: Maybe<Scalars['String']['output']>;
  keyType?: Maybe<Scalars['String']['output']>;
  licenseKey: Scalars['String']['output'];
  name: Scalars['String']['output'];
@@ -1,7 +1,5 @@
import { FatalAppError } from '@app/core/errors/fatal-error';
import { graphqlLogger } from '@app/core/log';
import { modules } from '@app/core';
import { getters } from '@app/store';

export const getCoreModule = (moduleName: string) => {
    if (!Object.keys(modules).includes(moduleName)) {
@@ -10,16 +8,3 @@ export const getCoreModule = (moduleName: string) => {

    return modules[moduleName];
};

export const apiKeyToUser = async (apiKey: string) => {
    try {
        const config = getters.config();
        if (apiKey === config.remote.apikey) return { id: -1, description: 'My servers service account', name: 'my_servers', role: 'my_servers' };
        if (apiKey === config.upc.apikey) return { id: -1, description: 'UPC service account', name: 'upc', role: 'upc' };
        if (apiKey === config.notifier.apikey) return { id: -1, description: 'Notifier service account', name: 'notifier', role: 'notifier' };
    } catch (error: unknown) {
        graphqlLogger.debug('Failed looking up API key with "%s"', (error as Error).message);
    }

    return { id: -1, description: 'A guest user', name: 'guest', role: 'guest' };
};
@@ -1,6 +1,7 @@
import { ensurePermission } from '@app/core/utils/index';
import { NODE_ENV } from '@app/environment';
import { type MutationResolvers } from '@app/graphql/generated/api/types';
import {
    type ConnectSignInInput,
} from '@app/graphql/generated/api/types';
import { API_KEY_STATUS } from '@app/mothership/api-key/api-key-types';
import { validateApiKeyWithKeyServer } from '@app/mothership/api-key/validate-api-key-with-keyserver';
import { getters, store } from '@app/store/index';
@@ -9,31 +10,26 @@ import { FileLoadStatus } from '@app/store/types';
import { GraphQLError } from 'graphql';
import { decodeJwt } from 'jose';

export const connectSignIn: MutationResolvers['connectSignIn'] = async (
    _,
    args,
    context
) => {
    ensurePermission(context.user, {
        resource: 'connect',
        possession: 'own',
        action: 'update',
    });

export const connectSignIn = async (
    input: ConnectSignInInput
): Promise<boolean> => {
    if (getters.emhttp().status === FileLoadStatus.LOADED) {
        const result = NODE_ENV === 'development' ? API_KEY_STATUS.API_KEY_VALID : await validateApiKeyWithKeyServer({
            apiKey: args.input.apiKey,
            flashGuid: getters.emhttp().var.flashGuid,
        });
        const result =
            NODE_ENV === 'development'
                ? API_KEY_STATUS.API_KEY_VALID
                : await validateApiKeyWithKeyServer({
                      apiKey: input.apiKey,
                      flashGuid: getters.emhttp().var.flashGuid,
                  });
        if (result !== API_KEY_STATUS.API_KEY_VALID) {
            throw new GraphQLError(
                `Validating API Key Failed with Error: ${result}`
            );
        }

        const userInfo = args.input.idToken
            ? decodeJwt(args.input.idToken)
            : args.input.userInfo ?? null;
        const userInfo = input.idToken
            ? decodeJwt(input.idToken)
            : input.userInfo ?? null;
        if (
            !userInfo ||
            !userInfo.preferred_username ||
@@ -47,10 +43,11 @@ export const connectSignIn: MutationResolvers['connectSignIn'] = async (
        // @TODO once we deprecate old sign in method, switch this to do all validation requests
        await store.dispatch(
            loginUser({
                avatar: typeof userInfo.avatar === 'string' ? userInfo.avatar : '',
                avatar:
                    typeof userInfo.avatar === 'string' ? userInfo.avatar : '',
                username: userInfo.preferred_username,
                email: userInfo.email,
                apikey: args.input.apiKey,
                apikey: input.apiKey,
            })
        );
        return true;
@@ -1,19 +0,0 @@
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
import { type MutationResolvers } from '@app/graphql/generated/api/types';
import { store } from '@app/store/index';
import { logoutUser } from '@app/store/modules/config';

export const connectSignOut: MutationResolvers['connectSignOut'] = async (
    _,
    __,
    context
) => {
    ensurePermission(context.user, {
        resource: 'connect',
        possession: 'own',
        action: 'update',
    });

    await store.dispatch(logoutUser({ reason: 'Manual Sign Out With API' }));
    return true;
};
@@ -1,20 +0,0 @@
import { getAllowedOrigins } from '@app/common/allowed-origins';
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
import { type MutationResolvers } from '@app/graphql/generated/api/types';
import { store } from '@app/store/index';
import { updateAllowedOrigins } from '@app/store/modules/config';

export const setAdditionalAllowedOrigins: MutationResolvers['setAdditionalAllowedOrigins'] =
    async (_, args, context) => {
        ensurePermission(context.user, {
            resource: 'connect',
            possession: 'own',
            action: 'update',
        });

        await store.dispatch(
            updateAllowedOrigins(args.input.origins)
        );

        return getAllowedOrigins();
    };
@@ -1,12 +0,0 @@
import { type Resolvers } from '@app/graphql/generated/api/types';
import { sendNotification } from './notifications';
import { connectSignIn } from '@app/graphql/resolvers/mutation/connect/connect-sign-in';
import { connectSignOut } from '@app/graphql/resolvers/mutation/connect/connect-sign-out';
import { setAdditionalAllowedOrigins } from '@app/graphql/resolvers/mutation/connect/set-additional-allowed-origins';

export const Mutation: Resolvers['Mutation'] = {
    sendNotification,
    connectSignIn,
    connectSignOut,
    setAdditionalAllowedOrigins,
};
@@ -1,23 +0,0 @@
/*!
 * Copyright 2021 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */

import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
import { ConfigErrorState, type QueryResolvers } from '@app/graphql/generated/api/types';
import { getters } from '@app/store';

export const config: QueryResolvers['config'] = async (_, __, context) => {
    ensurePermission(context.user, {
        resource: 'config',
        action: 'read',
        possession: 'any',
    });

    const emhttp = getters.emhttp();

    return {
        valid: emhttp.var.configValid,
        error: emhttp.var.configValid ? null : ConfigErrorState[emhttp.var.configState] ?? ConfigErrorState.UNKNOWN_ERROR
    };
};
@@ -1,16 +0,0 @@
import { getDockerContainers } from "@app/core/modules/index";
import { ensurePermission } from "@app/core/utils/permissions/ensure-permission";
import { type QueryResolvers } from "@app/graphql/generated/api/types";

export const dockerContainersResolver: QueryResolvers['dockerContainers'] = async (_, __, context) => {
    const { user } = context;

    // Check permissions
    ensurePermission(user, {
        resource: 'docker/container',
        action: 'read',
        possession: 'any',
    });

    return getDockerContainers();
}
@@ -1,40 +0,0 @@
import { getArray } from '@app/core/modules/get-array';
import { type QueryResolvers } from '@app/graphql/generated/api/types';
import cloud from '@app/graphql/resolvers/query/cloud';
import { config } from '@app/graphql/resolvers/query/config';
import crashReportingEnabled from '@app/graphql/resolvers/query/crash-reporting-enabled';
import { disksResolver } from '@app/graphql/resolvers/query/disks';
import display from '@app/graphql/resolvers/query/display';
import { dockerContainersResolver } from '@app/graphql/resolvers/query/docker';
import flash from '@app/graphql/resolvers/query/flash';
import { notificationsResolver } from '@app/graphql/resolvers/query/notifications';
import online from '@app/graphql/resolvers/query/online';
import owner from '@app/graphql/resolvers/query/owner';
import { registration } from '@app/graphql/resolvers/query/registration';
import { server } from '@app/graphql/resolvers/query/server';
import { servers } from '@app/graphql/resolvers/query/servers';
import twoFactor from '@app/graphql/resolvers/query/two-factor';
import { vmsResolver } from '@app/graphql/resolvers/query/vms';

export const Query: QueryResolvers = {
    array: getArray,
    cloud,
    config,
    crashReportingEnabled,
    disks: disksResolver,
    dockerContainers: dockerContainersResolver,
    display,
    flash,
    notifications: notificationsResolver,
    online,
    owner,
    registration,
    server,
    servers,
    twoFactor,
    vms: vmsResolver,
    info() {
        // Returns an empty object because the subfield resolvers live at the root (allows for partial fetching)
        return {};
    },
};
@@ -1,11 +1,9 @@
|
||||
import {
|
||||
baseboard,
|
||||
cpu,
|
||||
cpuFlags,
|
||||
mem,
|
||||
memLayout,
|
||||
osInfo,
|
||||
system,
|
||||
versions,
|
||||
} from 'systeminformation';
|
||||
import { docker } from '@app/core/utils/clients/docker';
|
||||
@@ -16,13 +14,10 @@ import {
|
||||
type Display,
|
||||
type Theme,
|
||||
type Temperature,
|
||||
type Baseboard,
|
||||
type Versions,
|
||||
type InfoMemory,
|
||||
type MemoryLayout,
|
||||
type System,
|
||||
type Devices,
|
||||
type InfoResolvers,
|
||||
type Gpu,
|
||||
} from '@app/graphql/generated/api/types';
|
||||
import { getters } from '@app/store';
|
||||
@@ -33,7 +28,6 @@ import toBytes from 'bytes';
|
||||
import { getUnraidVersion } from '@app/common/dashboard/get-unraid-version';
|
||||
import { AppError } from '@app/core/errors/app-error';
|
||||
import { cleanStdout } from '@app/core/utils/misc/clean-stdout';
|
||||
import { getMachineId } from '@app/core/utils/misc/get-machine-id';
|
||||
import { execaCommandSync, execa } from 'execa';
|
||||
import { pathExists } from 'path-exists';
|
||||
import { filter as asyncFilter } from 'p-iteration';
|
||||
@@ -58,19 +52,22 @@ export const generateApps = async (): Promise<InfoApps> => {
|
||||
return { installed, started };
|
||||
};
|
||||
|
||||
const generateOs = async (): Promise<InfoOs> => {
|
||||
export const generateOs = async (): Promise<InfoOs> => {
|
||||
const os = await osInfo();
|
||||
|
||||
return {
|
||||
...os,
|
||||
hostname: getters.emhttp().var.name,
|
||||
uptime: bootTimestamp.toISOString(),
|
||||
};
|
||||
};
|
||||
|
||||
const generateCpu = async (): Promise<InfoCpu> => {
|
||||
export const generateCpu = async (): Promise<InfoCpu> => {
|
||||
const { cores, physicalCores, speedMin, speedMax, stepping, ...rest } =
|
||||
await cpu();
|
||||
const flags = await cpuFlags().then((flags) => flags.split(' '));
|
||||
const flags = await cpuFlags()
|
||||
.then((flags) => flags.split(' '))
|
||||
.catch(() => []);
|
||||
|
||||
return {
|
||||
...rest,
|
||||
@@ -84,7 +81,7 @@ const generateCpu = async (): Promise<InfoCpu> => {
|
||||
};
|
||||
};
|
||||
|
||||
const generateDisplay = async (): Promise<Display> => {
|
||||
export const generateDisplay = async (): Promise<Display> => {
|
||||
const filePath = getters.paths()['dynamix-config'];
|
||||
const state = loadState<DynamixConfig>(filePath);
|
||||
if (!state) {
|
||||
@@ -110,9 +107,7 @@ const generateDisplay = async (): Promise<Display> => {
|
||||
};
|
||||
};
|
||||
|
||||
const generateBaseboard = async (): Promise<Baseboard> => baseboard();
|
||||
|
||||
const generateVersions = async (): Promise<Versions> => {
|
||||
export const generateVersions = async (): Promise<Versions> => {
|
||||
const unraid = await getUnraidVersion();
|
||||
const softwareVersions = await versions();
|
||||
|
||||
@@ -122,10 +117,10 @@ const generateVersions = async (): Promise<Versions> => {
|
||||
};
|
||||
};
|
||||
|
||||
const generateMemory = async (): Promise<InfoMemory> => {
|
||||
export const generateMemory = async (): Promise<InfoMemory> => {
|
||||
const layout = await memLayout().then((dims) =>
|
||||
dims.map((dim) => dim as MemoryLayout)
|
||||
);
|
||||
).catch(() => []);
|
||||
const info = await mem();
|
||||
let max = info.total;
|
||||
|
||||
@@ -175,7 +170,7 @@ const generateMemory = async (): Promise<InfoMemory> => {
|
||||
};
|
||||
};
|
||||
|
||||
const generateDevices = async (): Promise<Devices> => {
|
||||
export const generateDevices = async (): Promise<Devices> => {
|
||||
/**
|
||||
* Set device class to device.
|
||||
* @param device The device to modify.
|
||||
@@ -277,24 +272,24 @@ const generateDevices = async (): Promise<Devices> => {
|
||||
* @ignore
|
||||
* @private
|
||||
*/
|
||||
const systemGPUDevices: Promise<Gpu[]> = systemPciDevices().then(
|
||||
(devices) => {
|
||||
return devices.filter(
|
||||
(device) => device.class === 'vga' && !device.allowed
|
||||
).map(entry => {
|
||||
const gpu: Gpu = {
|
||||
blacklisted: entry.allowed,
|
||||
class: entry.class,
|
||||
id: entry.id,
|
||||
productid: entry.product,
|
||||
typeid: entry.typeid,
|
||||
type: entry.manufacturer,
|
||||
vendorname: entry.vendorname
|
||||
}
|
||||
return gpu;
|
||||
});
|
||||
}
|
||||
).catch(() => []);
|
||||
const systemGPUDevices: Promise<Gpu[]> = systemPciDevices()
|
||||
.then((devices) => {
|
||||
return devices
|
||||
.filter((device) => device.class === 'vga' && !device.allowed)
|
||||
.map((entry) => {
|
||||
const gpu: Gpu = {
|
||||
blacklisted: entry.allowed,
|
||||
class: entry.class,
|
||||
id: entry.id,
|
||||
productid: entry.product,
|
||||
typeid: entry.typeid,
|
||||
type: entry.manufacturer,
|
||||
vendorname: entry.vendorname,
|
||||
};
|
||||
return gpu;
|
||||
});
|
||||
})
|
||||
.catch(() => []);
|
||||
|
||||
/**
|
||||
* System usb devices.
|
||||
@@ -422,13 +417,15 @@ const generateDevices = async (): Promise<Devices> => {
|
||||
}) ?? [];
|
||||
|
||||
// Get all usb devices
|
||||
const usbDevices = await execa('lsusb').then(async ({ stdout }) =>
|
||||
parseUsbDevices(stdout)
|
||||
.map(parseDevice)
|
||||
.filter(filterBootDrive)
|
||||
.filter(filterUsbHubs)
|
||||
.map(sanitizeVendorName)
|
||||
);
|
||||
const usbDevices = await execa('lsusb')
|
||||
.then(async ({ stdout }) =>
|
||||
parseUsbDevices(stdout)
|
||||
.map(parseDevice)
|
||||
.filter(filterBootDrive)
|
||||
.filter(filterUsbHubs)
|
||||
.map(sanitizeVendorName)
|
||||
)
|
||||
.catch(() => []);
|
||||
|
||||
return usbDevices;
|
||||
} catch (error: unknown) {
|
||||
@@ -445,20 +442,3 @@ const generateDevices = async (): Promise<Devices> => {
|
||||
usb: await getSystemUSBDevices(),
|
||||
};
|
||||
};
|
||||
|
||||
const generateMachineId = async (): Promise<string> => getMachineId();
|
||||
|
||||
const generateSystem = async (): Promise<System> => system();
|
||||
|
||||
export const infoSubResolvers: InfoResolvers = {
|
||||
apps: async () => generateApps(),
|
||||
baseboard: async () => generateBaseboard(),
|
||||
cpu: async () => generateCpu(),
|
||||
devices: async () => generateDevices(),
|
||||
display: async () => generateDisplay(),
|
||||
machineId: async () => generateMachineId(),
|
||||
memory: async () => generateMemory(),
|
||||
os: async () => generateOs(),
|
||||
system: async () => generateSystem(),
|
||||
versions: async () => generateVersions(),
|
||||
};
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
import { type QueryResolvers } from '@app/graphql/generated/api/types';
|
||||
import { getters } from '@app/store/index';
|
||||
|
||||
export const notificationsResolver: QueryResolvers['notifications'] = async (
|
||||
_,
|
||||
{ filter: { offset, limit, importance, type } },
|
||||
context
|
||||
) => {
|
||||
ensurePermission(context.user, {
|
||||
possession: 'any',
|
||||
resource: 'notifications',
|
||||
action: 'read',
|
||||
});
|
||||
|
||||
if (limit > 50) {
|
||||
throw new Error('Limit must be less than 50');
|
||||
}
|
||||
return Object.values(getters.notifications().notifications)
|
||||
.filter((notification) => {
|
||||
if (
|
||||
importance &&
|
||||
importance !== notification.importance
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
if (type && type !== notification.type) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
})
|
||||
.sort(
|
||||
(a, b) =>
|
||||
new Date(b.timestamp ?? 0).getTime() -
|
||||
new Date(a.timestamp ?? 0).getTime()
|
||||
)
|
||||
.slice(
|
||||
offset,
|
||||
limit + offset
|
||||
);
|
||||
};
|
||||
@@ -1 +0,0 @@
export default () => true;
@@ -1,40 +0,0 @@
|
||||
/*!
|
||||
* Copyright 2021 Lime Technology Inc. All rights reserved.
|
||||
* Written by: Alexis Tyler
|
||||
*/
|
||||
|
||||
import { getKeyFile } from '@app/core/utils/misc/get-key-file';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
import { type Registration, type QueryResolvers } from '@app/graphql/generated/api/types';
|
||||
import { getters } from '@app/store';
|
||||
import { FileLoadStatus } from '@app/store/types';
|
||||
|
||||
export const registration: QueryResolvers['registration'] = async (_, __, context) => {
|
||||
ensurePermission(context.user, {
|
||||
resource: 'registration',
|
||||
action: 'read',
|
||||
possession: 'any',
|
||||
});
|
||||
|
||||
const emhttp = getters.emhttp();
|
||||
if (emhttp.status !== FileLoadStatus.LOADED || !emhttp.var?.regTy) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const isTrial = emhttp.var.regTy?.toLowerCase() === 'trial';
|
||||
const isExpired = emhttp.var.regTy.includes('expired');
|
||||
|
||||
const registration: Registration = {
|
||||
guid: emhttp.var.regGuid,
|
||||
type: emhttp.var.regTy,
|
||||
state: emhttp.var.regState,
|
||||
// Based on https://github.com/unraid/dynamix.unraid.net/blob/c565217fa8b2acf23943dc5c22a12d526cdf70a1/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/include/state.php#L64
|
||||
expiration:
|
||||
(1_000 * (isTrial || isExpired ? Number(emhttp.var.regTm2) : 0)).toString(),
|
||||
keyFile: {
|
||||
location: emhttp.var.regFile,
|
||||
contents: await getKeyFile(),
|
||||
},
|
||||
};
|
||||
return registration;
|
||||
};
|
||||
@@ -1,16 +0,0 @@
import { getServers } from '@app/graphql/schema/utils';
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
import { type QueryResolvers } from '@app/graphql/generated/api/types';

export const server: QueryResolvers['server'] = async (_: unknown, { name }, context) => {
    ensurePermission(context.user, {
        resource: 'servers',
        action: 'read',
        possession: 'any',
    });

    const servers = getServers();

    // Single server
    return servers.find(server => server.name === name) ?? undefined;
};
@@ -1,29 +0,0 @@
import { getServers } from '@app/graphql/schema/utils';
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
import { ServerStatus, type Resolvers } from '../../generated/api/types';

export const servers: NonNullable<Resolvers['Query']>['servers'] = async (_, __, context) => {
    ensurePermission(context.user, {
        resource: 'servers',
        action: 'read',
        possession: 'any',
    });

    // All servers
    const servers = getServers().map(server => ({
        ...server,
        apikey: server.apikey ?? '',
        guid: server.guid ?? '',
        lanip: server.lanip ?? '',
        localurl: server.localurl ?? '',
        wanip: server.wanip ?? '',
        name: server.name ?? '',
        owner: {
            ...server.owner,
            username: server.owner?.username ?? ''
        },
        remoteurl: server.remoteurl ?? '',
        status: server.status ?? ServerStatus.OFFLINE
    }))
    return servers;
};
@@ -1 +0,0 @@
export const vmsResolver = () => ({});
@@ -1,28 +0,0 @@
import { DateTimeResolver, JSONResolver, PortResolver, UUIDResolver } from 'graphql-scalars';

import { Query } from '@app/graphql/resolvers/query';
import { Mutation } from '@app/graphql/resolvers/mutation';
import { Subscription } from '@app/graphql/resolvers/subscription';
import { UserAccount } from '@app/graphql/resolvers/user-account';
import { type Resolvers } from '../generated/api/types';
import { infoSubResolvers } from './query/info';
import { GraphQLLong } from '@app/graphql/resolvers/graphql-type-long';
import { domainResolver } from '@app/core/modules/index';

export const resolvers: Resolvers = {
    JSON: JSONResolver,
    Long: GraphQLLong,
    UUID: UUIDResolver,
    DateTime: DateTimeResolver,
    Port: PortResolver,
    Query,
    Mutation,
    Subscription,
    UserAccount,
    Info: {
        ...infoSubResolvers,
    },
    Vms: {
        domain: domainResolver,
    },
};
@@ -1,6 +1,6 @@
import { dashboardLogger } from '@app/core/log';
import { generateData } from '@app/common/dashboard/generate-data';
import { pubsub } from '@app/core/pubsub';
import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub';
import { getters, store } from '@app/store';
import { saveDataPacket } from '@app/store/modules/dashboard';
import { isEqual } from 'lodash';
@@ -63,12 +63,10 @@ export const publishToDashboard = async () => {
    store.dispatch(saveDataPacket({ lastDataPacket: dataPacket }));

    // Publish the updated data
    dashboardLogger.addContext('update', dataPacket);
    dashboardLogger.trace('Publishing update');
    dashboardLogger.removeContext('update');
    dashboardLogger.trace({ dataPacket } , 'Publishing update');

    // Update local clients
    await pubsub.publish('dashboard', {
    await pubsub.publish(PUBSUB_CHANNEL.DASHBOARD, {
        dashboard: dataPacket,
    });
    if (dataPacket) {
@@ -1,69 +0,0 @@
|
||||
import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub';
|
||||
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
|
||||
import { type Resolvers } from '@app/graphql/generated/api/types';
|
||||
import { createSubscription } from '@app/graphql/schema/utils';
|
||||
|
||||
export const Subscription: Resolvers['Subscription'] = {
|
||||
display: {
|
||||
...createSubscription('display'),
|
||||
},
|
||||
apikeys: {
|
||||
// Not sure how we're going to secure this
|
||||
// ...createSubscription('apikeys')
|
||||
},
|
||||
config: {
|
||||
...createSubscription('config'),
|
||||
},
|
||||
array: {
|
||||
...createSubscription('array'),
|
||||
},
|
||||
dockerContainers: {
|
||||
...createSubscription('docker/container'),
|
||||
},
|
||||
dockerNetworks: {
|
||||
...createSubscription('docker/network'),
|
||||
},
|
||||
notificationAdded: {
|
||||
subscribe: (_parent, _args, context) => {
|
||||
ensurePermission(context.user, {
|
||||
possession: 'any',
|
||||
resource: 'notifications',
|
||||
action: 'read',
|
||||
});
|
||||
return {
|
||||
[Symbol.asyncIterator]: () =>
|
||||
pubsub.asyncIterator(PUBSUB_CHANNEL.NOTIFICATION),
|
||||
};
|
||||
},
|
||||
},
|
||||
info: {
|
||||
...createSubscription('info'),
|
||||
},
|
||||
servers: {
|
||||
...createSubscription('servers'),
|
||||
},
|
||||
shares: {
|
||||
...createSubscription('shares'),
|
||||
},
|
||||
unassignedDevices: {
|
||||
...createSubscription('devices/unassigned'),
|
||||
},
|
||||
users: {
|
||||
...createSubscription('users'),
|
||||
},
|
||||
vars: {
|
||||
...createSubscription('vars'),
|
||||
},
|
||||
vms: {
|
||||
...createSubscription('vms'),
|
||||
},
|
||||
registration: {
|
||||
...createSubscription('registration'),
|
||||
},
|
||||
online: {
|
||||
...createSubscription('online'),
|
||||
},
|
||||
owner: {
|
||||
...createSubscription('owner'),
|
||||
},
|
||||
};
|
||||
@@ -1,60 +1,82 @@
|
||||
import { GraphQLClient } from '@app/mothership/graphql-client';
|
||||
import { type Nginx } from '@app/core/types/states/nginx';
|
||||
import { type RootState, store, getters } from '@app/store';
|
||||
import { type NetworkInput, URL_TYPE, type AccessUrlInput } from '@app/graphql/generated/client/graphql';
|
||||
import {
|
||||
type NetworkInput,
|
||||
URL_TYPE,
|
||||
type AccessUrlInput,
|
||||
} from '@app/graphql/generated/client/graphql';
|
||||
import { dashboardLogger, logger } from '@app/core';
|
||||
import { isEqual } from 'lodash';
|
||||
import { SEND_NETWORK_MUTATION } from '@app/graphql/mothership/mutations';
|
||||
import { saveNetworkPacket } from '@app/store/modules/dashboard';
|
||||
import { ApolloError } from '@apollo/client/core/core.cjs';
|
||||
import { AccessUrlInputSchema, NetworkInputSchema } from '@app/graphql/generated/client/validators';
|
||||
import {
|
||||
AccessUrlInputSchema,
|
||||
NetworkInputSchema,
|
||||
} from '@app/graphql/generated/client/validators';
|
||||
import { ZodError } from 'zod';
|
||||
|
||||
interface UrlForFieldInput {
|
||||
url: string;
|
||||
port?: number;
|
||||
portSsl?: number;
|
||||
url: string;
|
||||
port?: number;
|
||||
portSsl?: number;
|
||||
}
|
||||
|
||||
interface UrlForFieldInputSecure extends UrlForFieldInput {
|
||||
url: string;
|
||||
portSsl: number;
|
||||
url: string;
|
||||
portSsl: number;
|
||||
}
|
||||
interface UrlForFieldInputInsecure extends UrlForFieldInput {
|
||||
url: string;
|
||||
port: number;
|
||||
url: string;
|
||||
port: number;
|
||||
}
|
||||
|
||||
export const getUrlForField = ({ url, port, portSsl }: UrlForFieldInputInsecure | UrlForFieldInputSecure) => {
|
||||
let portToUse = '';
|
||||
let httpMode = 'https://';
|
||||
export const getUrlForField = ({
|
||||
url,
|
||||
port,
|
||||
portSsl,
|
||||
}: UrlForFieldInputInsecure | UrlForFieldInputSecure) => {
|
||||
let portToUse = '';
|
||||
let httpMode = 'https://';
|
||||
|
||||
if (!url || url === '') {
|
||||
throw new Error('No URL Provided');
|
||||
}
|
||||
if (!url || url === '') {
|
||||
throw new Error('No URL Provided');
|
||||
}
|
||||
|
||||
if (port) {
|
||||
portToUse = port === 80 ? '' : `:${port}`;
|
||||
httpMode = 'http://';
|
||||
} else if (portSsl) {
|
||||
portToUse = portSsl === 443 ? '' : `:${portSsl}`;
|
||||
httpMode = 'https://';
|
||||
} else {
|
||||
throw new Error(`No ports specified for URL: ${url}`);
|
||||
}
|
||||
if (port) {
|
||||
portToUse = port === 80 ? '' : `:${port}`;
|
||||
httpMode = 'http://';
|
||||
} else if (portSsl) {
|
||||
portToUse = portSsl === 443 ? '' : `:${portSsl}`;
|
||||
httpMode = 'https://';
|
||||
} else {
|
||||
throw new Error(`No ports specified for URL: ${url}`);
|
||||
}
|
||||
|
||||
const urlString = `${httpMode}${url}${portToUse}`;
|
||||
const urlString = `${httpMode}${url}${portToUse}`;
|
||||
|
||||
try {
|
||||
return new URL(urlString);
|
||||
} catch (error: unknown) {
|
||||
throw new Error(`Failed to parse URL: ${urlString}`);
|
||||
}
|
||||
try {
|
||||
return new URL(urlString);
|
||||
} catch (error: unknown) {
|
||||
throw new Error(`Failed to parse URL: ${urlString}`);
|
||||
}
|
||||
};
|
||||
|
||||
const fieldIsFqdn = (field: keyof Nginx) => field?.toLowerCase().includes('fqdn');
|
||||
const fieldIsFqdn = (field: keyof Nginx) =>
|
||||
field?.toLowerCase().includes('fqdn');
|
||||
|
||||
export type NginxUrlFields = Extract<keyof Nginx, 'lanIp' | 'lanIp6' | 'lanName' | 'lanMdns' | 'lanFqdn' | 'lanFqdn6' | 'wanFqdn' | 'wanFqdn6'>;
|
||||
export type NginxUrlFields = Extract<
|
||||
keyof Nginx,
|
||||
| 'lanIp'
|
||||
| 'lanIp6'
|
||||
| 'lanName'
|
||||
| 'lanMdns'
|
||||
| 'lanFqdn'
|
||||
| 'lanFqdn6'
|
||||
| 'wanFqdn'
|
||||
| 'wanFqdn6'
|
||||
>;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -63,254 +85,307 @@ export type NginxUrlFields = Extract<keyof Nginx, 'lanIp' | 'lanIp6' | 'lanName'
|
||||
* @returns a URL, created from the combination of inputs
|
||||
* @throws Error when the URL cannot be created or the URL is invalid
|
||||
*/
|
||||
export const getUrlForServer = ({ nginx, field }: { nginx: Nginx; field: NginxUrlFields }): URL => {
|
||||
if (nginx[field]) {
|
||||
if (fieldIsFqdn(field)) {
|
||||
return getUrlForField({ url: nginx[field], portSsl: nginx.httpsPort });
|
||||
}
|
||||
export const getUrlForServer = ({
|
||||
nginx,
|
||||
field,
|
||||
}: {
|
||||
nginx: Nginx;
|
||||
field: NginxUrlFields;
|
||||
}): URL => {
|
||||
if (nginx[field]) {
|
||||
if (fieldIsFqdn(field)) {
|
||||
return getUrlForField({
|
||||
url: nginx[field],
|
||||
portSsl: nginx.httpsPort,
|
||||
});
|
||||
}
|
||||
|
||||
if (!nginx.sslEnabled) {// Use SSL = no
|
||||
return getUrlForField({ url: nginx[field], port: nginx.httpPort });
|
||||
}
|
||||
if (!nginx.sslEnabled) {
|
||||
// Use SSL = no
|
||||
return getUrlForField({ url: nginx[field], port: nginx.httpPort });
|
||||
}
|
||||
|
||||
if (nginx.sslMode === 'yes') {
|
||||
return getUrlForField({ url: nginx[field], portSsl: nginx.httpsPort });
|
||||
}
|
||||
if (nginx.sslMode === 'yes') {
|
||||
return getUrlForField({
|
||||
url: nginx[field],
|
||||
portSsl: nginx.httpsPort,
|
||||
});
|
||||
}
|
||||
|
||||
if (nginx.sslMode === 'auto') {
|
||||
throw new Error(`Cannot get IP Based URL for field: "${field}" SSL mode auto`);
|
||||
}
|
||||
}
|
||||
if (nginx.sslMode === 'auto') {
|
||||
throw new Error(
|
||||
`Cannot get IP Based URL for field: "${field}" SSL mode auto`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`IP URL Resolver: Could not resolve any access URL for field: "${field}", is FQDN?: ${fieldIsFqdn(field)}`);
|
||||
throw new Error(
|
||||
`IP URL Resolver: Could not resolve any access URL for field: "${field}", is FQDN?: ${fieldIsFqdn(
|
||||
field
|
||||
)}`
|
||||
);
|
||||
};
|
||||
|
||||
// eslint-disable-next-line complexity
|
||||
export const getServerIps = (state: RootState = store.getState()): { urls: AccessUrlInput[]; errors: Error[] } => {
|
||||
const { nginx } = state.emhttp;
|
||||
const { remote: { wanport } } = state.config;
|
||||
if (!nginx || Object.keys(nginx).length === 0) {
|
||||
return { urls: [], errors: [new Error('Nginx Not Loaded')] };
|
||||
}
|
||||
export const getServerIps = (
|
||||
state: RootState = store.getState()
|
||||
): { urls: AccessUrlInput[]; errors: Error[] } => {
|
||||
const { nginx } = state.emhttp;
|
||||
const {
|
||||
remote: { wanport },
|
||||
} = state.config;
|
||||
if (!nginx || Object.keys(nginx).length === 0) {
|
||||
return { urls: [], errors: [new Error('Nginx Not Loaded')] };
|
||||
}
|
||||
|
||||
const errors: Error[] = [];
|
||||
const urls: AccessUrlInput[] = [];
|
||||
const errors: Error[] = [];
|
||||
const urls: AccessUrlInput[] = [];
|
||||
|
||||
try {
|
||||
// Default URL
|
||||
const defaultUrl = new URL(nginx.defaultUrl);
|
||||
urls.push({
|
||||
name: 'Default',
|
||||
type: URL_TYPE.DEFAULT,
|
||||
ipv4: defaultUrl,
|
||||
ipv6: defaultUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// Default URL
|
||||
const defaultUrl = new URL(nginx.defaultUrl);
|
||||
urls.push({
|
||||
name: 'Default',
|
||||
type: URL_TYPE.DEFAULT,
|
||||
ipv4: defaultUrl,
|
||||
ipv6: defaultUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan IP URL
|
||||
const lanIp4Url = getUrlForServer({ nginx, field: 'lanIp' });
|
||||
urls.push({
|
||||
name: 'LAN IPv4',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv4: lanIp4Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// Lan IP URL
|
||||
const lanIp4Url = getUrlForServer({ nginx, field: 'lanIp' });
|
||||
urls.push({
|
||||
name: 'LAN IPv4',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv4: lanIp4Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan IP6 URL
|
||||
const lanIp6Url = getUrlForServer({ nginx, field: 'lanIp6' });
|
||||
urls.push({
|
||||
name: 'LAN IPv6',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv4: lanIp6Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// Lan IP6 URL
|
||||
const lanIp6Url = getUrlForServer({ nginx, field: 'lanIp6' });
|
||||
urls.push({
|
||||
name: 'LAN IPv6',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv4: lanIp6Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan Name URL
|
||||
const lanNameUrl = getUrlForServer({ nginx, field: 'lanName' });
|
||||
urls.push({
|
||||
name: 'LAN Name',
|
||||
type: URL_TYPE.MDNS,
|
||||
ipv4: lanNameUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// Lan Name URL
|
||||
const lanNameUrl = getUrlForServer({ nginx, field: 'lanName' });
|
||||
urls.push({
|
||||
name: 'LAN Name',
|
||||
type: URL_TYPE.MDNS,
|
||||
ipv4: lanNameUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan MDNS URL
|
||||
const lanMdnsUrl = getUrlForServer({ nginx, field: 'lanMdns' });
|
||||
urls.push({
|
||||
name: 'LAN MDNS',
|
||||
type: URL_TYPE.MDNS,
|
||||
ipv4: lanMdnsUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// Lan MDNS URL
|
||||
const lanMdnsUrl = getUrlForServer({ nginx, field: 'lanMdns' });
|
||||
urls.push({
|
||||
name: 'LAN MDNS',
|
||||
type: URL_TYPE.MDNS,
|
||||
ipv4: lanMdnsUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan FQDN URL
|
||||
const lanFqdnUrl = getUrlForServer({ nginx, field: 'lanFqdn' });
|
||||
urls.push({
|
||||
name: 'LAN FQDN',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv4: lanFqdnUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// Lan FQDN URL
|
||||
const lanFqdnUrl = getUrlForServer({ nginx, field: 'lanFqdn' });
|
||||
urls.push({
|
||||
name: 'LAN FQDN',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv4: lanFqdnUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Lan FQDN6 URL
|
||||
const lanFqdn6Url = getUrlForServer({ nginx, field: 'lanFqdn6' });
|
||||
urls.push({
|
||||
name: 'LAN FQDNv6',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv6: lanFqdn6Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// Lan FQDN6 URL
|
||||
const lanFqdn6Url = getUrlForServer({ nginx, field: 'lanFqdn6' });
|
||||
urls.push({
|
||||
name: 'LAN FQDNv6',
|
||||
type: URL_TYPE.LAN,
|
||||
ipv6: lanFqdn6Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// WAN FQDN URL
|
||||
const wanFqdnUrl = getUrlForField({ url: nginx.wanFqdn, portSsl: Number(wanport || 443) });
|
||||
urls.push({
|
||||
name: 'WAN FQDN',
|
||||
type: URL_TYPE.WAN,
|
||||
ipv4: wanFqdnUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// WAN FQDN URL
|
||||
const wanFqdnUrl = getUrlForField({
|
||||
url: nginx.wanFqdn,
|
||||
portSsl: Number(wanport || 443),
|
||||
});
|
||||
urls.push({
|
||||
name: 'WAN FQDN',
|
||||
type: URL_TYPE.WAN,
|
||||
ipv4: wanFqdnUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// WAN FQDN6 URL
|
||||
const wanFqdn6Url = getUrlForField({ url: nginx.wanFqdn6, portSsl: Number(wanport) });
|
||||
urls.push({
|
||||
name: 'WAN FQDNv6',
|
||||
type: URL_TYPE.WAN,
|
||||
ipv6: wanFqdn6Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// WAN FQDN6 URL
|
||||
const wanFqdn6Url = getUrlForField({
|
||||
url: nginx.wanFqdn6,
|
||||
portSsl: Number(wanport),
|
||||
});
|
||||
urls.push({
|
||||
name: 'WAN FQDNv6',
|
||||
type: URL_TYPE.WAN,
|
||||
ipv6: wanFqdn6Url,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
|
||||
for (const wgFqdn of nginx.wgFqdns) {
|
||||
try {
|
||||
// WG FQDN URL
|
||||
const wgFqdnUrl = getUrlForField({ url: wgFqdn.fqdn, portSsl: nginx.httpsPort });
|
||||
urls.push({
|
||||
name: `WG FQDN ${wgFqdn.id}`,
|
||||
type: URL_TYPE.WIREGUARD,
|
||||
ipv4: wgFqdnUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const wgFqdn of nginx.wgFqdns) {
|
||||
try {
|
||||
// WG FQDN URL
|
||||
const wgFqdnUrl = getUrlForField({
|
||||
url: wgFqdn.fqdn,
|
||||
portSsl: nginx.httpsPort,
|
||||
});
|
||||
urls.push({
|
||||
name: `WG FQDN ${wgFqdn.id}`,
|
||||
type: URL_TYPE.WIREGUARD,
|
||||
ipv4: wgFqdnUrl,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
errors.push(error);
|
||||
} else {
|
||||
logger.warn('Uncaught error in network resolver', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const safeUrls = urls.map((url) => AccessUrlInputSchema().safeParse(url)).reduce<AccessUrlInput[]>((acc, curr) => {
|
||||
if (curr.success) {
|
||||
acc.push(curr.data)
|
||||
} else {
|
||||
errors.push(curr.error)
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
const safeUrls = urls
|
||||
.map((url) => AccessUrlInputSchema().safeParse(url))
|
||||
.reduce<AccessUrlInput[]>((acc, curr) => {
|
||||
if (curr.success) {
|
||||
acc.push(curr.data);
|
||||
} else {
|
||||
errors.push(curr.error);
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
return { urls: safeUrls, errors };
|
||||
return { urls: safeUrls, errors };
|
||||
};
|
||||
|
||||
export const publishNetwork = async () => {
|
||||
try {
|
||||
const client = GraphQLClient.getInstance();
|
||||
try {
|
||||
const client = GraphQLClient.getInstance();
|
||||
|
||||
const datapacket = getServerIps();
|
||||
if (datapacket.errors ) {
|
||||
const zodErrors = datapacket.errors.filter(error => error instanceof ZodError)
|
||||
if (zodErrors.length) {
|
||||
dashboardLogger.warn('Validation Errors Encountered with Network Payload: %s', zodErrors.map(error => error.message).join(','))
|
||||
}
|
||||
}
|
||||
const networkPacket: NetworkInput = { accessUrls: datapacket.urls }
|
||||
const validatedNetwork = NetworkInputSchema().parse(networkPacket);
|
||||
|
||||
const { lastNetworkPacket } = getters.dashboard();
|
||||
const { apikey: apiKey } = getters.config().remote;
|
||||
if (isEqual(JSON.stringify(lastNetworkPacket), JSON.stringify(validatedNetwork))) {
|
||||
dashboardLogger.trace('[DASHBOARD] Skipping Update');
|
||||
} else if (client) {
|
||||
dashboardLogger.addContext('data', validatedNetwork);
|
||||
dashboardLogger.info('Sending data packet for network');
|
||||
dashboardLogger.removeContext('data');
|
||||
const result = await client.mutate({
|
||||
mutation: SEND_NETWORK_MUTATION,
|
||||
variables: {
|
||||
apiKey,
|
||||
data: validatedNetwork,
|
||||
},
|
||||
});
|
||||
dashboardLogger.addContext('sendNetworkResult', result);
|
||||
dashboardLogger.debug('Sent network mutation with %s urls', datapacket.urls.length);
|
||||
dashboardLogger.removeContext('sendNetworkResult');
|
||||
store.dispatch(saveNetworkPacket({ lastNetworkPacket: validatedNetwork }));
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
dashboardLogger.trace('ERROR', error);
|
||||
if (error instanceof ApolloError) {
|
||||
dashboardLogger.error('Failed publishing with GQL Errors: %s, \nClient Errors: %s', error.graphQLErrors.map(error => error.message).join(','), error.clientErrors.join(', '));
|
||||
} else {
|
||||
dashboardLogger.error(error);
|
||||
}
|
||||
}
|
||||
const datapacket = getServerIps();
|
||||
if (datapacket.errors) {
|
||||
const zodErrors = datapacket.errors.filter(
|
||||
(error) => error instanceof ZodError
|
||||
);
|
||||
if (zodErrors.length) {
|
||||
dashboardLogger.warn(
|
||||
'Validation Errors Encountered with Network Payload: %s',
|
||||
zodErrors.map((error) => error.message).join(',')
|
||||
);
|
||||
}
|
||||
}
|
||||
const networkPacket: NetworkInput = { accessUrls: datapacket.urls };
|
||||
const validatedNetwork = NetworkInputSchema().parse(networkPacket);
|
||||
|
||||
const { lastNetworkPacket } = getters.dashboard();
|
||||
const { apikey: apiKey } = getters.config().remote;
|
||||
if (
|
||||
isEqual(
|
||||
JSON.stringify(lastNetworkPacket),
|
||||
JSON.stringify(validatedNetwork)
|
||||
)
|
||||
) {
|
||||
dashboardLogger.trace('[DASHBOARD] Skipping Update');
|
||||
} else if (client) {
|
||||
dashboardLogger.info(
|
||||
{ validatedNetwork },
|
||||
'Sending data packet for network'
|
||||
);
|
||||
const result = await client.mutate({
|
||||
mutation: SEND_NETWORK_MUTATION,
|
||||
variables: {
|
||||
apiKey,
|
||||
data: validatedNetwork,
|
||||
},
|
||||
});
|
||||
dashboardLogger.debug(
|
||||
{ result },
|
||||
'Sent network mutation with %s urls',
|
||||
datapacket.urls.length
|
||||
);
|
||||
store.dispatch(
|
||||
saveNetworkPacket({ lastNetworkPacket: validatedNetwork })
|
||||
);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
dashboardLogger.trace('ERROR', error);
|
||||
if (error instanceof ApolloError) {
|
||||
dashboardLogger.error(
|
||||
'Failed publishing with GQL Errors: %s, \nClient Errors: %s',
|
||||
error.graphQLErrors.map((error) => error.message).join(','),
|
||||
error.clientErrors.join(', ')
|
||||
);
|
||||
} else {
|
||||
dashboardLogger.error(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
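publishNetwork only fires the mutation when the validated payload differs from the one it last sent, which keeps the mothership connection quiet between real changes. The guard reduces to this shape (a sketch with a hypothetical send callback, not part of the diff; note that lodash's isEqual can compare the objects directly, so the JSON.stringify round-trip is not strictly needed):

import { isEqual } from 'lodash';

let lastPayload: unknown = null;

async function publishIfChanged(payload: unknown, send: (p: unknown) => Promise<void>) {
    if (isEqual(lastPayload, payload)) {
        return; // nothing changed since the last publish, skip the network call
    }
    await send(payload);
    lastPayload = payload; // remember what was sent for the next comparison
}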
@@ -13,9 +13,7 @@ import { getters } from '@app/store/index';
export const executeRemoteGraphQLQuery = async (
    data: RemoteGraphQLEventFragmentFragment['remoteGraphQLEventData']
) => {
    remoteQueryLogger.addContext('data', data);
    remoteQueryLogger.debug('Executing remote query');
    remoteQueryLogger.removeContext('data');
    remoteQueryLogger.debug({ query: data }, 'Executing remote query');
    const client = GraphQLClient.getInstance();
    const apiKey = getters.config().remote.apikey;
    const originalBody = data.body;
@@ -25,18 +23,14 @@ export const executeRemoteGraphQLQuery = async (
        upcApiKey: apiKey
    });
    if (ENVIRONMENT === 'development') {
        remoteQueryLogger.addContext('query', parsedQuery.query);
        remoteQueryLogger.debug('[DEVONLY] Running query');
        remoteQueryLogger.removeContext('query');
        remoteQueryLogger.debug({ query: parsedQuery.query }, '[DEVONLY] Running query');
    }
    const localResult = await localClient.query({
        query: parsedQuery.query,
        variables: parsedQuery.variables,
    });
    if (localResult.data) {
        remoteQueryLogger.addContext('data', localResult.data);
        remoteQueryLogger.trace('Got data from remoteQuery request', data.sha256);
        remoteQueryLogger.removeContext('data')
        remoteQueryLogger.trace({ data: localResult.data }, 'Got data from remoteQuery request', data.sha256);

        await client?.mutate({
            mutation: SEND_REMOTE_QUERY_RESPONSE,
@@ -77,8 +71,6 @@ export const executeRemoteGraphQLQuery = async (
        } catch (error) {
            remoteQueryLogger.warn('Could not respond %o', error);
        }
        remoteQueryLogger.addContext('error', err);
        remoteQueryLogger.error('Error executing remote query %s', err instanceof Error ? err.message: 'Unknown Error');
        remoteQueryLogger.removeContext('error');
    }
};
|
||||
|
||||
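Several hunks in this commit swap the addContext/removeContext bracketing for a single call that passes context as the first argument. A small illustration of the difference, assuming a pino-style structured logger (illustrative only, not the project's logger setup):

import pino from 'pino';

const log = pino();
const payload = { sha256: 'abc123', body: '{"query":"{ online }"}' };

// Before: context is attached to shared logger state around the call,
// which can leak between concurrent requests.
// log.addContext('data', payload); log.debug('Executing remote query'); log.removeContext('data');

// After: context travels with the individual log line.
log.debug({ query: payload }, 'Executing remote query');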
@@ -1,6 +0,0 @@
export const UserAccount = {
    __resolveType(obj: Record<string, unknown>) {
        // Only a user has a password field, the current user aka "me" doesn't.
        return obj.password ? 'User' : 'Me';
    },
};
|
||||
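With this resolver file deleted and the schema now declared in SDL (user.graphql below still defines interface UserAccount), the same discrimination logic can be supplied through the resolvers map handed to makeExecutableSchema. A sketch of that shape, assuming the project's resolvers object accepts a UserAccount entry:

export const userAccountResolver = {
    UserAccount: {
        // Only a local User exposes the boolean `password` field; the current
        // user ("me") does not, so its presence identifies the concrete type.
        __resolveType(obj: Record<string, unknown>) {
            return 'password' in obj ? 'User' : 'Me';
        },
    },
};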
10
api/src/graphql/schema.ts
Normal file
@@ -0,0 +1,10 @@
import { makeExecutableSchema } from '@graphql-tools/schema';
import { resolvers } from '@app/graphql/resolvers/resolvers';
import { typeDefs } from '@app/graphql/schema/index';

const baseSchema = makeExecutableSchema({
    typeDefs: typeDefs,
    resolvers,
});

export const schema = (baseSchema);
|
||||
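Since makeExecutableSchema returns a standard GraphQLSchema, the result can be exercised without standing up an HTTP server, which is handy for smoke tests. A minimal sketch (the online field comes from base.graphql further down):

import { graphql } from 'graphql';
import { schema } from '@app/graphql/schema';

void graphql({ schema, source: '{ online }' }).then((result) => {
    console.log(result.data); // e.g. { online: true } once the resolver is wired up
});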
42
api/src/graphql/schema/types/apikeys/apikey.graphql
Normal file
@@ -0,0 +1,42 @@
|
||||
input authenticateInput {
|
||||
password: String!
|
||||
}
|
||||
|
||||
input addApiKeyInput {
|
||||
name: String
|
||||
key: String
|
||||
userId: String
|
||||
}
|
||||
|
||||
input updateApikeyInput {
|
||||
description: String
|
||||
expiresAt: Long!
|
||||
}
|
||||
|
||||
type Query {
|
||||
"""Get all API keys"""
|
||||
apiKeys: [ApiKey]
|
||||
}
|
||||
|
||||
type Mutation {
|
||||
"""Get an existing API key"""
|
||||
getApiKey(name: String!, input: authenticateInput): ApiKey
|
||||
|
||||
"""Create a new API key"""
|
||||
addApikey(name: String!, input: updateApikeyInput): ApiKey
|
||||
|
||||
"""Update an existing API key"""
|
||||
updateApikey(name: String!, input: updateApikeyInput): ApiKey
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
apikeys: [ApiKey]
|
||||
}
|
||||
|
||||
type ApiKey {
|
||||
name: String!
|
||||
key: String!
|
||||
description: String
|
||||
scopes: JSON!
|
||||
expiresAt: Long!
|
||||
}
|
||||
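A quick way to exercise the new apiKeys query is a plain POST against the /graphql endpoint with the x-api-key header used elsewhere in this codebase; the URL and key below are placeholders (Node 18+ for the global fetch):

const response = await fetch('http://localhost:4000/graphql', {
    method: 'POST',
    headers: {
        'content-type': 'application/json',
        'x-api-key': '<your-api-key>', // placeholder
    },
    body: JSON.stringify({
        query: '{ apiKeys { name description expiresAt } }',
    }),
});
console.log(await response.json());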
@@ -5,14 +5,14 @@ type Query {

type Mutation {
    """Start array"""
    startArray: Array @func(module: "updateArray", data: { state: "start" })
    startArray: Array
    """Stop array"""
    stopArray: Array @func(module: "updateArray", data: { state: "stop" })
    stopArray: Array

    """Add new disk to array"""
    addDiskToArray(input: arrayDiskInput): Array @func(module: "addDiskToArray")
    addDiskToArray(input: arrayDiskInput): Array
    """Remove existing disk from array. NOTE: The array must be stopped before running this otherwise it'll throw an error."""
    removeDiskFromArray(input: arrayDiskInput): Array @func(module: "removeDiskFromArray")
    removeDiskFromArray(input: arrayDiskInput): Array

    mountArrayDisk(id: ID!): Disk
    unmountArrayDisk(id: ID!): Disk
|
||||
|
||||
26
api/src/graphql/schema/types/array/parity.graphql
Normal file
@@ -0,0 +1,26 @@
|
||||
type Query {
|
||||
parityHistory: [ParityCheck]
|
||||
}
|
||||
|
||||
type Mutation {
|
||||
"""Start parity check"""
|
||||
startParityCheck(correct: Boolean): JSON
|
||||
"""Pause parity check"""
|
||||
pauseParityCheck: JSON
|
||||
"""Resume parity check"""
|
||||
resumeParityCheck: JSON
|
||||
"""Cancel parity check"""
|
||||
cancelParityCheck: JSON
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
parityHistory: ParityCheck!
|
||||
}
|
||||
|
||||
type ParityCheck {
|
||||
date: String!
|
||||
duration: Int!
|
||||
speed: String!
|
||||
status: String!
|
||||
errors: String!
|
||||
}
|
||||
28
api/src/graphql/schema/types/base.graphql
Normal file
@@ -0,0 +1,28 @@
|
||||
scalar JSON
|
||||
scalar Long
|
||||
scalar UUID
|
||||
scalar DateTime
|
||||
scalar Port
|
||||
|
||||
type Welcome {
|
||||
message: String!
|
||||
}
|
||||
|
||||
type Query {
|
||||
# This should always be available even for guest users
|
||||
online: Boolean
|
||||
info: Info
|
||||
}
|
||||
|
||||
type Mutation {
|
||||
login(username: String!, password: String!): String
|
||||
sendNotification(notification: NotificationInput!): Notification
|
||||
shutdown: String
|
||||
reboot: String
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
ping: String!
|
||||
info: Info!
|
||||
online: Boolean!
|
||||
}
|
||||
65
api/src/graphql/schema/types/disks/disk.graphql
Normal file
@@ -0,0 +1,65 @@
|
||||
type Query {
|
||||
"""Single disk"""
|
||||
disk(id: ID!): Disk
|
||||
"""Mulitiple disks"""
|
||||
disks: [Disk]!
|
||||
}
|
||||
type Disk {
|
||||
# /dev/sdb
|
||||
device: String!
|
||||
# SSD
|
||||
type: String!
|
||||
# Samsung_SSD_860_QVO_1TB
|
||||
name: String!
|
||||
# Samsung
|
||||
vendor: String!
|
||||
# 1000204886016
|
||||
size: Long!
|
||||
# -1
|
||||
bytesPerSector: Long!
|
||||
# -1
|
||||
totalCylinders: Long!
|
||||
# -1
|
||||
totalHeads: Long!
|
||||
# -1
|
||||
totalSectors: Long!
|
||||
# -1
|
||||
totalTracks: Long!
|
||||
# -1
|
||||
tracksPerCylinder: Long!
|
||||
# -1
|
||||
sectorsPerTrack: Long!
|
||||
# 1B6Q
|
||||
firmwareRevision: String!
|
||||
# S4CZNF0M807232N
|
||||
serialNum: String!
|
||||
interfaceType: DiskInterfaceType!
|
||||
smartStatus: DiskSmartStatus!
|
||||
temperature: Long!
|
||||
partitions: [DiskPartition!]
|
||||
}
|
||||
|
||||
type DiskPartition {
|
||||
name: String!
|
||||
fsType: DiskFsType!
|
||||
size: Long!
|
||||
}
|
||||
|
||||
enum DiskFsType {
|
||||
xfs
|
||||
btrfs
|
||||
vfat
|
||||
}
|
||||
|
||||
enum DiskInterfaceType {
|
||||
SAS
|
||||
SATA
|
||||
USB
|
||||
PCIe
|
||||
UNKNOWN
|
||||
}
|
||||
|
||||
enum DiskSmartStatus {
|
||||
OK
|
||||
UNKNOWN
|
||||
}
|
||||
29
api/src/graphql/schema/types/docker/network.graphql
Normal file
@@ -0,0 +1,29 @@
|
||||
type Query {
|
||||
"""Docker network"""
|
||||
dockerNetwork(id: ID!): DockerNetwork!
|
||||
"""All Docker networks"""
|
||||
dockerNetworks(all: Boolean): [DockerNetwork]!
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
dockerNetwork(id: ID!): DockerNetwork!
|
||||
dockerNetworks: [DockerNetwork]!
|
||||
}
|
||||
|
||||
type DockerNetwork {
|
||||
name: String
|
||||
id: ID
|
||||
created: String
|
||||
scope: String
|
||||
driver: String
|
||||
enableIPv6: Boolean!
|
||||
ipam: JSON
|
||||
internal: Boolean!
|
||||
attachable: Boolean!
|
||||
ingress: Boolean!
|
||||
configFrom: JSON
|
||||
configOnly: Boolean!
|
||||
containers: JSON
|
||||
options: JSON
|
||||
labels: JSON
|
||||
}
|
||||
34
api/src/graphql/schema/types/servers/server.graphql
Normal file
@@ -0,0 +1,34 @@
|
||||
type Query {
|
||||
server: Server
|
||||
servers: [Server!]!
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
server: Server
|
||||
}
|
||||
|
||||
enum ServerStatus {
|
||||
online
|
||||
offline
|
||||
never_connected
|
||||
}
|
||||
|
||||
|
||||
type ProfileModel {
|
||||
userId: ID
|
||||
username: String
|
||||
url: String
|
||||
avatar: String
|
||||
}
|
||||
|
||||
type Server {
|
||||
owner: ProfileModel!
|
||||
guid: String!
|
||||
apikey: String!
|
||||
name: String!
|
||||
status: ServerStatus!
|
||||
wanip: String!
|
||||
lanip: String!
|
||||
localurl: String!
|
||||
remoteurl: String!
|
||||
}
|
||||
@@ -1,6 +1,6 @@
type Query {
    """Network Shares"""
    shares: [Share] @func(module: "getAllShares")
    shares: [Share]
}

type Subscription {
|
||||
|
||||
@@ -0,0 +1,62 @@
|
||||
type Query {
|
||||
unassignedDevices: [UnassignedDevice]
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
unassignedDevices: [UnassignedDevice!]
|
||||
}
|
||||
|
||||
type UnassignedDevice {
|
||||
devlinks: String
|
||||
devname: String
|
||||
devpath: String
|
||||
devtype: String
|
||||
idAta: String
|
||||
idAtaDownloadMicrocode: String
|
||||
idAtaFeatureSetAam: String
|
||||
idAtaFeatureSetAamCurrentValue: String
|
||||
idAtaFeatureSetAamEnabled: String
|
||||
idAtaFeatureSetAamVendorRecommendedValue: String
|
||||
idAtaFeatureSetApm: String
|
||||
idAtaFeatureSetApmCurrentValue: String
|
||||
idAtaFeatureSetApmEnabled: String
|
||||
idAtaFeatureSetHpa: String
|
||||
idAtaFeatureSetHpaEnabled: String
|
||||
idAtaFeatureSetPm: String
|
||||
idAtaFeatureSetPmEnabled: String
|
||||
idAtaFeatureSetPuis: String
|
||||
idAtaFeatureSetPuisEnabled: String
|
||||
idAtaFeatureSetSecurity: String
|
||||
idAtaFeatureSetSecurityEnabled: String
|
||||
idAtaFeatureSetSecurityEnhancedEraseUnitMin: String
|
||||
idAtaFeatureSetSecurityEraseUnitMin: String
|
||||
idAtaFeatureSetSmart: String
|
||||
idAtaFeatureSetSmartEnabled: String
|
||||
idAtaRotationRateRpm: String
|
||||
idAtaSata: String
|
||||
idAtaSataSignalRateGen1: String
|
||||
idAtaSataSignalRateGen2: String
|
||||
idAtaWriteCache: String
|
||||
idAtaWriteCacheEnabled: String
|
||||
idBus: String
|
||||
idModel: String
|
||||
idModelEnc: String
|
||||
idPartTableType: String
|
||||
idPath: String
|
||||
idPathTag: String
|
||||
idRevision: String
|
||||
idSerial: String
|
||||
idSerialShort: String
|
||||
idType: String
|
||||
idWwn: String
|
||||
idWwnWithExtension: String
|
||||
major: String
|
||||
minor: String
|
||||
subsystem: String
|
||||
usecInitialized: String
|
||||
partitions: [Partition]
|
||||
temp: Int
|
||||
name: String
|
||||
mounted: Boolean
|
||||
mount: Mount
|
||||
}
|
||||
17
api/src/graphql/schema/types/users/me.graphql
Normal file
@@ -0,0 +1,17 @@
|
||||
type Query {
|
||||
"""Current user account"""
|
||||
me: Me
|
||||
}
|
||||
|
||||
"""The current user"""
|
||||
type Me implements UserAccount {
|
||||
id: ID!
|
||||
name: String!
|
||||
description: String!
|
||||
roles: String!
|
||||
permissions: JSON
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
me: Me
|
||||
}
|
||||
50
api/src/graphql/schema/types/users/user.graphql
Normal file
@@ -0,0 +1,50 @@
|
||||
interface UserAccount {
|
||||
id: ID!
|
||||
name: String!
|
||||
description: String!
|
||||
roles: String!
|
||||
}
|
||||
|
||||
input usersInput {
|
||||
slim: Boolean
|
||||
}
|
||||
|
||||
type Query {
|
||||
"""User account"""
|
||||
user(id: ID!): User
|
||||
"""User accounts"""
|
||||
users(input: usersInput): [User!]!
|
||||
}
|
||||
|
||||
input addUserInput {
|
||||
name: String!
|
||||
password: String!
|
||||
description: String
|
||||
}
|
||||
|
||||
input deleteUserInput {
|
||||
name: String!
|
||||
}
|
||||
|
||||
type Mutation {
|
||||
"""Add a new user"""
|
||||
addUser(input: addUserInput!): User
|
||||
"""Delete a user"""
|
||||
deleteUser(input: deleteUserInput!): User
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
user(id: ID!): User!
|
||||
users: [User]!
|
||||
}
|
||||
|
||||
"""A local user account"""
|
||||
type User implements UserAccount {
|
||||
id: ID!
|
||||
"""A unique name for the user"""
|
||||
name: String!
|
||||
description: String!
|
||||
roles: String!
|
||||
"""If the account has a password set"""
|
||||
password: Boolean
|
||||
}
|
||||
@@ -1,5 +1,5 @@
type Query {
    vars: Vars @func(module: "getVars")
    vars: Vars
}

type Subscription {
|
||||
|
||||
@@ -39,7 +39,7 @@ export const createSubscription = (channel: string, resource?: string) => ({
});

// eslint-disable-next-line @typescript-eslint/no-unused-vars
const getLocalServer = (getState = store.getState): Array<Server> => {
export const getLocalServer = (getState = store.getState): Array<Server> => {
    const { emhttp, config, minigraph } = getState();
    const guid = emhttp.var.regGuid;
    const { name } = emhttp.var;
@@ -58,7 +58,7 @@ const getLocalServer = (getState = store.getState): Array<Server> => {
    },
    guid,
    apikey: config.remote.apikey ?? '',
    name,
    name: name ?? 'Local Server',
    status:
        minigraph.status === MinigraphStatus.CONNECTED
            ? ServerStatus.ONLINE
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
import { mergeTypeDefs } from '@graphql-tools/merge';
|
||||
import { gql } from 'graphql-tag';
|
||||
import { typeDefs } from '@app/graphql/schema/index';
|
||||
|
||||
export const baseTypes = [
|
||||
gql`
|
||||
scalar JSON
|
||||
scalar Long
|
||||
scalar UUID
|
||||
scalar DateTime
|
||||
scalar Port
|
||||
|
||||
directive @subscription(channel: String!) on FIELD_DEFINITION
|
||||
|
||||
type Welcome {
|
||||
message: String!
|
||||
}
|
||||
|
||||
type Query {
|
||||
# This should always be available even for guest users
|
||||
welcome: Welcome @func(module: "getWelcome")
|
||||
online: Boolean
|
||||
info: Info
|
||||
}
|
||||
|
||||
type Mutation {
|
||||
login(username: String!, password: String!): String
|
||||
sendNotification(notification: NotificationInput!): Notification
|
||||
shutdown: String
|
||||
reboot: String
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
ping: String!
|
||||
info: Info!
|
||||
online: Boolean!
|
||||
}
|
||||
`,
|
||||
];
|
||||
|
||||
export const types = mergeTypeDefs([
|
||||
...baseTypes,
|
||||
typeDefs,
|
||||
]);
|
||||
|
||||
export default types;
|
||||
@@ -12,20 +12,23 @@ import { loadStateFiles } from '@app/store/modules/emhttp';
|
||||
import { StateManager } from '@app/store/watch/state-watch';
|
||||
import { setupRegistrationKeyWatch } from '@app/store/watch/registration-watch';
|
||||
import { loadRegistrationKey } from '@app/store/modules/registration';
|
||||
import { createApolloExpressServer } from '@app/server';
|
||||
import { unlinkSync } from 'fs';
|
||||
import { fileExistsSync } from '@app/core/utils/files/file-exists';
|
||||
import { PORT, environment } from '@app/environment';
|
||||
import { shutdownApiEvent } from '@app/store/actions/shutdown-api-event';
|
||||
import { PingTimeoutJobs } from '@app/mothership/jobs/ping-timeout-jobs';
|
||||
import { type BaseContext, type ApolloServer } from '@apollo/server';
|
||||
import { setupDynamixConfigWatch } from '@app/store/watch/dynamix-config-watch';
|
||||
import { setupVarRunWatch } from '@app/store/watch/var-run-watch';
|
||||
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file';
|
||||
import { startMiddlewareListeners } from '@app/store/listeners/listener-middleware';
|
||||
import { validateApiKeyIfPresent } from '@app/store/listeners/api-key-listener';
|
||||
import { bootstrapNestServer } from '@app/unraid-api/main';
|
||||
import { type NestFastifyApplication } from '@nestjs/platform-fastify';
|
||||
import { type RawServerDefault } from 'fastify';
|
||||
import { setupLogRotation } from '@app/core/logrotate/setup-logrotate';
|
||||
import * as env from '@app/environment';
|
||||
|
||||
let server: ApolloServer<BaseContext>;
|
||||
let server: NestFastifyApplication<RawServerDefault>;
|
||||
|
||||
const unlinkUnixPort = () => {
|
||||
if (isNaN(parseInt(PORT, 10))) {
|
||||
@@ -36,6 +39,9 @@ const unlinkUnixPort = () => {
|
||||
void am(
|
||||
async () => {
|
||||
environment.IS_MAIN_PROCESS = true;
|
||||
|
||||
logger.debug('ENV %o', env);
|
||||
|
||||
const cacheable = new CacheableLookup();
|
||||
|
||||
Object.assign(global, { WebSocket: require('ws') });
|
||||
@@ -47,6 +53,8 @@ void am(
|
||||
// Must occur before config is loaded to ensure that the handler can fix broken configs
|
||||
await startStoreSync();
|
||||
|
||||
await setupLogRotation();
|
||||
|
||||
// Load my servers config file into store
|
||||
await store.dispatch(loadConfigFile());
|
||||
|
||||
@@ -78,8 +86,7 @@ void am(
|
||||
unlinkUnixPort();
|
||||
|
||||
// Start webserver
|
||||
server = await createApolloExpressServer();
|
||||
|
||||
server = await bootstrapNestServer();
|
||||
PingTimeoutJobs.init();
|
||||
|
||||
startMiddlewareListeners();
|
||||
@@ -88,6 +95,7 @@ void am(
|
||||
|
||||
// On process exit stop HTTP server - this says it supports async but it doesn't seem to
|
||||
exitHook(() => {
|
||||
server?.close?.();
|
||||
// If port is unix socket, delete socket before exiting
|
||||
unlinkUnixPort();
|
||||
|
||||
@@ -96,14 +104,10 @@ void am(
|
||||
});
|
||||
},
|
||||
async (error: NodeJS.ErrnoException) => {
|
||||
// Log error to syslog
|
||||
logger.error('API-GLOBAL-ERROR', error);
|
||||
logger.error('API-GLOBAL-ERROR %s %s', error.message, error.stack);
|
||||
shutdownApiEvent();
|
||||
|
||||
// Stop server
|
||||
logger.debug('Stopping HTTP server');
|
||||
if (server) {
|
||||
await server.stop();
|
||||
await server?.close?.();
|
||||
}
|
||||
|
||||
// Kill application
|
||||
|
||||
@@ -12,7 +12,7 @@ import { type Response } from 'got';
|
||||
export const validateApiKeyWithKeyServer = async ({ flashGuid, apiKey }: { flashGuid: string; apiKey: string }): Promise<API_KEY_STATUS> => {
|
||||
// If we're still loading config state, just return the config is loading
|
||||
|
||||
ksLog.log('Validating API Key with KeyServer');
|
||||
ksLog.info('Validating API Key with KeyServer');
|
||||
|
||||
// Send apiKey, etc. to key-server for verification
|
||||
let response: Response<string>;
|
||||
@@ -22,9 +22,7 @@ export const validateApiKeyWithKeyServer = async ({ flashGuid, apiKey }: { flash
|
||||
apikey: apiKey,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
ksLog.addContext('networkError', error);
|
||||
ksLog.error('Caught error reaching Key Server');
|
||||
ksLog.removeContext('networkError');
|
||||
ksLog.error({ error }, 'Caught error reaching Key Server');
|
||||
|
||||
return API_KEY_STATUS.NETWORK_ERROR;
|
||||
}
|
||||
|
||||
@@ -33,7 +33,7 @@ const getWebsocketWithMothershipHeaders = () => {
        headers: getMothershipWebsocketHeaders(),
    });
}
}
};
};

const delayFn = buildDelayFunction({
@@ -89,7 +89,7 @@ export class GraphQLClient {
    const isStateValid = isAPIStateDataFullyLoaded() && isApiKeyValid();

    if (!GraphQLClient.instance && isStateValid) {
        minigraphLogger.debug("Creating a new Apollo Client Instance");
        minigraphLogger.debug('Creating a new Apollo Client Instance');
        GraphQLClient.instance = GraphQLClient.createGraphqlClient();
    }

@@ -128,10 +128,10 @@ export class GraphQLClient {
            logoutUser({ reason: 'Invalid API Key on Mothership' })
        );
    }


    const getDelay = delayFn(count);
    store.dispatch(setMothershipTimeout(getDelay));
    minigraphLogger.info('Delay currently is', getDelay);
    minigraphLogger.info('Delay currently is: %i', getDelay);
    return getDelay;
},
attempts: { max: Infinity },
|
||||
|
||||
56
api/src/mothership/jobs/token-refresh-jobs.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { OAUTH_CLIENT_ID, OAUTH_OPENID_CONFIGURATION_URL } from '@app/consts';
|
||||
import { mothershipLogger } from '@app/core';
|
||||
import { getters, store } from '@app/store';
|
||||
import { updateAccessTokens } from '@app/store/modules/config';
|
||||
import { Cron, Expression, Initializer } from '@reflet/cron';
|
||||
import { Issuer } from 'openid-client';
|
||||
|
||||
export class TokenRefresh extends Initializer<typeof TokenRefresh> {
|
||||
private issuer: Issuer | null = null;
|
||||
|
||||
@Cron.PreventOverlap
|
||||
@Cron(Expression.EVERY_DAY_AT_NOON)
|
||||
@Cron.RunOnInit
|
||||
async getNewTokens() {
|
||||
const {
|
||||
remote: { refreshtoken },
|
||||
} = getters.config();
|
||||
|
||||
if (!refreshtoken) {
|
||||
mothershipLogger.debug('No JWT refresh token configured');
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.issuer) {
|
||||
try {
|
||||
this.issuer = await Issuer.discover(
|
||||
OAUTH_OPENID_CONFIGURATION_URL
|
||||
);
|
||||
|
||||
mothershipLogger.trace(
|
||||
'Discovered Issuer %s',
|
||||
this.issuer.issuer
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
mothershipLogger.error({ error }, 'Failed to discover issuer');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const client = new this.issuer.Client({
|
||||
client_id: OAUTH_CLIENT_ID,
|
||||
token_endpoint_auth_method: 'none',
|
||||
});
|
||||
|
||||
const newTokens = await client.refresh(refreshtoken);
|
||||
mothershipLogger.debug('tokens %o', newTokens);
|
||||
if (newTokens.access_token && newTokens.id_token) {
|
||||
store.dispatch(
|
||||
updateAccessTokens({
|
||||
accesstoken: newTokens.access_token,
|
||||
idtoken: newTokens.id_token,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
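One caveat with the job above: openid-client's client.refresh rejects when the refresh token has expired or been revoked, so an unguarded call surfaces as an unhandled rejection inside the cron run. A defensive variant of the final step (a sketch, not part of the diff):

try {
    const newTokens = await client.refresh(refreshtoken);
    if (newTokens.access_token && newTokens.id_token) {
        store.dispatch(
            updateAccessTokens({
                accesstoken: newTokens.access_token,
                idtoken: newTokens.id_token,
            })
        );
    }
} catch (error: unknown) {
    // e.g. invalid_grant once the refresh token is no longer accepted
    mothershipLogger.error({ error }, 'Failed to refresh tokens');
}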
135
api/src/mothership/subscribe-to-mothership.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
/* eslint-disable max-depth */
|
||||
import { minigraphLogger, mothershipLogger } from '@app/core/log';
|
||||
import { GraphQLClient } from './graphql-client';
|
||||
import { store } from '@app/store';
|
||||
import {
|
||||
startDashboardProducer,
|
||||
stopDashboardProducer,
|
||||
} from '@app/store/modules/dashboard';
|
||||
|
||||
import {
|
||||
EVENTS_SUBSCRIPTION,
|
||||
RemoteAccess_Fragment,
|
||||
RemoteGraphQL_Fragment,
|
||||
} from '@app/graphql/mothership/subscriptions';
|
||||
|
||||
import { ClientType } from '@app/graphql/generated/client/graphql';
|
||||
import { notNull } from '@app/utils';
|
||||
import { handleRemoteAccessEvent } from '@app/store/actions/handle-remote-access-event';
|
||||
import { useFragment } from '@app/graphql/generated/client/fragment-masking';
|
||||
import { handleRemoteGraphQLEvent } from '@app/store/actions/handle-remote-graphql-event';
|
||||
import {
|
||||
setSelfDisconnected,
|
||||
setSelfReconnected,
|
||||
} from '@app/store/modules/minigraph';
|
||||
|
||||
export const subscribeToEvents = async (apiKey: string) => {
|
||||
minigraphLogger.info('Subscribing to Events');
|
||||
const client = GraphQLClient.getInstance();
|
||||
if (!client) {
|
||||
throw new Error('Unable to use client - state must not be loaded');
|
||||
}
|
||||
|
||||
const eventsSub = client.subscribe({
|
||||
query: EVENTS_SUBSCRIPTION,
|
||||
fetchPolicy: 'no-cache',
|
||||
});
|
||||
eventsSub.subscribe(async ({ data, errors }) => {
|
||||
if (errors) {
|
||||
mothershipLogger.error(
|
||||
'GraphQL Error with events subscription: %s',
|
||||
errors.join(',')
|
||||
);
|
||||
} else if (data) {
|
||||
mothershipLogger.trace({ events: data.events }, 'Got events from mothership');
|
||||
|
||||
for (const event of data.events?.filter(notNull) ?? []) {
|
||||
switch (event.__typename) {
|
||||
case 'ClientConnectedEvent': {
|
||||
const {
|
||||
connectedData: { type, apiKey: eventApiKey },
|
||||
} = event;
|
||||
// Another server connected to Mothership
|
||||
if (type === ClientType.API) {
|
||||
if (eventApiKey === apiKey) {
|
||||
// We are online, clear timeout waiting if it's set
|
||||
store.dispatch(setSelfReconnected());
|
||||
}
|
||||
}
|
||||
|
||||
// Dashboard Connected to Mothership
|
||||
|
||||
if (
|
||||
type === ClientType.DASHBOARD &&
|
||||
apiKey === eventApiKey
|
||||
) {
|
||||
store.dispatch(startDashboardProducer());
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'ClientDisconnectedEvent': {
|
||||
const {
|
||||
disconnectedData: { type, apiKey: eventApiKey },
|
||||
} = event;
|
||||
// Server Disconnected From Mothership
|
||||
if (type === ClientType.API) {
|
||||
if (eventApiKey === apiKey) {
|
||||
store.dispatch(setSelfDisconnected());
|
||||
}
|
||||
}
|
||||
|
||||
// The dashboard was closed or went idle
|
||||
|
||||
if (
|
||||
type === ClientType.DASHBOARD &&
|
||||
apiKey === eventApiKey
|
||||
) {
|
||||
store.dispatch(stopDashboardProducer());
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'RemoteAccessEvent': {
|
||||
const eventAsRemoteAccessEvent = useFragment(
|
||||
RemoteAccess_Fragment,
|
||||
event
|
||||
);
|
||||
|
||||
if (eventAsRemoteAccessEvent.data.apiKey === apiKey) {
|
||||
void store.dispatch(
|
||||
handleRemoteAccessEvent(
|
||||
eventAsRemoteAccessEvent
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'RemoteGraphQLEvent': {
|
||||
const eventAsRemoteGraphQLEvent = useFragment(
|
||||
RemoteGraphQL_Fragment,
|
||||
event
|
||||
);
|
||||
// No need to check API key here anymore
|
||||
|
||||
void store.dispatch(
|
||||
handleRemoteGraphQLEvent(eventAsRemoteGraphQLEvent)
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'UpdateEvent': {
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
25
api/src/mothership/utils/delay-function.ts
Normal file
@@ -0,0 +1,25 @@
import { type DelayFunctionOptions } from '@apollo/client/link/retry/delayFunction';

export function buildDelayFunction(
    delayOptions?: DelayFunctionOptions,
): (count: number) => number {
    const { initial = 10_000, jitter = true, max = Infinity } = delayOptions ?? {};
    // If we're jittering, baseDelay is half of the maximum delay for that
    // attempt (and is, on average, the delay we will encounter).
    // If we're not jittering, adjust baseDelay so that the first attempt
    // lines up with initialDelay, for everyone's sanity.
    const baseDelay = jitter ? initial : initial / 2;

    return (count: number) => {
        // eslint-disable-next-line no-mixed-operators
        let delay = Math.min(max, baseDelay * 2 ** count);
        if (jitter) {
            // We opt for a full jitter approach for a mostly uniform distribution,
            // but bound it within initialDelay and delay for everyone's sanity.
            // eslint-disable-next-line operator-assignment
            delay = Math.random() * delay;
        }

        return Math.round(delay);
    };
}
|
||||
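Exporting the delay function (rather than relying on RetryLink's built-in backoff) lets the API log and persist the chosen delay, as the graphql-client hunk above does with setMothershipTimeout. A hedged sketch of wiring it back into a RetryLink via the retry link's delay-function form:

import { RetryLink } from '@apollo/client/link/retry';
import { buildDelayFunction } from '@app/mothership/utils/delay-function';

const delayFn = buildDelayFunction({ initial: 10_000, max: 300_000, jitter: true });

const retryLink = new RetryLink({
    // count is the attempt number; the function returns the wait in milliseconds.
    delay: (count) => delayFn(count),
    attempts: { max: Infinity },
});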
@@ -1,76 +0,0 @@
|
||||
import { type NextFunction, type Request, type Response } from 'express';
|
||||
import { logger } from '@app/core';
|
||||
import { getAllowedOrigins } from '@app/common/allowed-origins';
|
||||
import { LOG_CORS } from '@app/environment';
|
||||
|
||||
const getOriginGraphqlError = () => ({
|
||||
data: null,
|
||||
errors: [
|
||||
{
|
||||
message:
|
||||
'The CORS policy for this site does not allow access from the specified Origin.',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
/**
|
||||
* Middleware to check a users origin and send a GraphQL error if they are not using a valid one
|
||||
* @param req Express Request
|
||||
* @param res Express Response
|
||||
* @param next Express NextFunction
|
||||
* @returns void
|
||||
*/
|
||||
export const originMiddleware = (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): void => {
|
||||
if (req.method === 'GET' && req.query.apiKey && !req.headers.origin) {
|
||||
// Bypass GET request headers on requests to the log endpoint
|
||||
return next();
|
||||
}
|
||||
// Dev Mode Bypass
|
||||
const origin = req.get('Origin')?.toLowerCase() ?? '';
|
||||
const allowedOrigins = getAllowedOrigins();
|
||||
|
||||
if (process.env.BYPASS_CORS_CHECKS === 'true') {
|
||||
logger.addContext('cors', allowedOrigins);
|
||||
logger.warn(`BYPASSING_CORS_CHECK: %o`, req.headers);
|
||||
logger.removeContext('cors');
|
||||
next();
|
||||
return;
|
||||
} else {
|
||||
if (LOG_CORS) {
|
||||
logger.addContext('origins', allowedOrigins.join(', '));
|
||||
logger.trace(`Current Origin: ${origin ?? 'undefined'}`);
|
||||
logger.removeContext('origins');
|
||||
}
|
||||
}
|
||||
|
||||
// Disallow requests with no origin
|
||||
// (like mobile apps, curl requests or viewing /graphql directly)
|
||||
if (!origin) {
|
||||
logger.debug('No origin provided, denying CORS!');
|
||||
res.status(403).send(getOriginGraphqlError());
|
||||
return;
|
||||
}
|
||||
|
||||
if (LOG_CORS) {
|
||||
logger.trace(`📒 Checking "${origin}" for CORS access.`);
|
||||
}
|
||||
|
||||
// Only allow known origins
|
||||
if (!allowedOrigins.includes(origin)) {
|
||||
logger.error(
|
||||
'❌ %s is not in the allowed origins list, denying CORS!',
|
||||
origin
|
||||
);
|
||||
res.status(403).send(getOriginGraphqlError());
|
||||
return;
|
||||
}
|
||||
|
||||
if (LOG_CORS) {
|
||||
logger.trace('✔️ Origin check passed, granting CORS!');
|
||||
}
|
||||
next();
|
||||
};
|
||||
@@ -1,355 +0,0 @@
|
||||
import path from 'path';
|
||||
import cors from 'cors';
|
||||
import { watch } from 'chokidar';
|
||||
import express, { json, type Request, type Response } from 'express';
|
||||
import http from 'http';
|
||||
import { ApolloServer } from '@apollo/server';
|
||||
import { expressMiddleware } from '@apollo/server/express4';
|
||||
import { ApolloServerPluginDrainHttpServer } from '@apollo/server/plugin/drainHttpServer';
|
||||
import { logger, pubsub, graphqlLogger } from '@app/core';
|
||||
import { verifyTwoFactorToken } from '@app/common/two-factor';
|
||||
import display from '@app/graphql/resolvers/query/display';
|
||||
import { getters } from '@app/store';
|
||||
import { schema } from '@app/graphql/schema';
|
||||
import { execute, subscribe } from 'graphql';
|
||||
import { GRAPHQL_WS, SubscriptionServer } from 'subscriptions-transport-ws';
|
||||
import { wsHasConnected, wsHasDisconnected } from '@app/ws';
|
||||
import { apiKeyToUser } from '@app/graphql';
|
||||
import { randomUUID } from 'crypto';
|
||||
import { getServerAddress } from '@app/common/get-server-address';
|
||||
import { originMiddleware } from '@app/originMiddleware';
|
||||
import { API_VERSION, GRAPHQL_INTROSPECTION, PORT } from '@app/environment';
|
||||
import {
|
||||
getBannerPathIfPresent,
|
||||
getCasePathIfPresent,
|
||||
} from '@app/core/utils/images/image-file-helpers';
|
||||
import { WebSocketServer } from 'ws';
|
||||
import { useServer } from 'graphql-ws/lib/use/ws';
|
||||
import { GRAPHQL_TRANSPORT_WS_PROTOCOL } from 'graphql-ws';
|
||||
import { getLogs } from '@app/graphql/express/get-logs';
|
||||
|
||||
const configFilePath = path.join(
|
||||
getters.paths()['dynamix-base'],
|
||||
'case-model.cfg'
|
||||
);
|
||||
const customImageFilePath = path.join(
|
||||
getters.paths()['dynamix-base'],
|
||||
'case-model.png'
|
||||
);
|
||||
|
||||
const updatePubsub = async () => {
|
||||
await pubsub.publish('display', {
|
||||
display: await display(),
|
||||
});
|
||||
};
|
||||
|
||||
// Update pub/sub when config/image file is added/updated/removed
|
||||
watch(configFilePath).on('all', updatePubsub);
|
||||
watch(customImageFilePath).on('all', updatePubsub);
|
||||
|
||||
export const createApolloExpressServer = async () => {
|
||||
// Try and load the HTTP server
|
||||
graphqlLogger.debug('Starting HTTP server');
|
||||
const app = express();
|
||||
const httpServer = http.createServer(app);
|
||||
|
||||
app.use(json());
|
||||
|
||||
// Cors
|
||||
app.use(cors());
|
||||
app.use(originMiddleware);
|
||||
|
||||
// Add Unraid API version header
|
||||
app.use(async (_req, res, next) => {
|
||||
// Only get the machine ID on first request
|
||||
// We do this to avoid using async in the main server function
|
||||
if (!app.get('x-unraid-api-version')) {
|
||||
app.set('x-unraid-api-version', API_VERSION);
|
||||
}
|
||||
|
||||
// Update header with unraid API version
|
||||
res.set('x-unraid-api-version', app.get('x-unraid-api-version'));
|
||||
|
||||
next();
|
||||
});
|
||||
|
||||
// Log only if the server actually binds to the port
|
||||
httpServer.on('listening', () => {
|
||||
logger.info('Server is up! %s', getServerAddress(httpServer));
|
||||
});
|
||||
|
||||
// graphql-ws
|
||||
const graphqlWs = new WebSocketServer({ noServer: true });
|
||||
|
||||
// subscriptions-transport-ws
|
||||
const subTransWs = new WebSocketServer({
|
||||
noServer: true,
|
||||
});
|
||||
|
||||
// graphql-ws setup
|
||||
const graphqlWsServer = useServer<
|
||||
{ 'x-api-key': string },
|
||||
{ context: { user: any; websocketId: string } }
|
||||
>(
|
||||
{
|
||||
schema,
|
||||
onError(ctx, message, errors) {
|
||||
logger.debug('%o %o %o', ctx, message, errors);
|
||||
},
|
||||
async onConnect(ctx) {
|
||||
logger.debug(
|
||||
'Connecting new client with params: %o',
|
||||
ctx.connectionParams
|
||||
);
|
||||
const params: unknown = ctx.connectionParams?.['x-api-key'];
|
||||
if (params && typeof params === 'string') {
|
||||
const apiKey = params;
|
||||
const user = await apiKeyToUser(apiKey);
|
||||
const websocketId = randomUUID();
|
||||
logger.debug('User is %o', user);
|
||||
ctx.extra.context = { user, websocketId };
|
||||
return true;
|
||||
}
|
||||
return {};
|
||||
},
|
||||
context: (ctx) => {
|
||||
return ctx.extra.context;
|
||||
},
|
||||
},
|
||||
graphqlWs
|
||||
);
|
||||
|
||||
// subscriptions-transport-ws setup
|
||||
const subscriptionsTransportServer = SubscriptionServer.create(
|
||||
{
|
||||
// This is the `schema` we just created.
|
||||
schema,
|
||||
// These are imported from `graphql`.
|
||||
execute,
|
||||
subscribe,
|
||||
// Ensure keep-alive packets are sent
|
||||
keepAlive: 10_000,
|
||||
// Providing `onConnect` is the `SubscriptionServer` equivalent to the
|
||||
// `context` function in `ApolloServer`. Please [see the docs](https://github.com/apollographql/subscriptions-transport-ws#constructoroptions-socketoptions--socketserver)
|
||||
// for more information on this hook.
|
||||
async onConnect(connectionParams: { 'x-api-key': string }) {
|
||||
const apiKey = connectionParams['x-api-key'];
|
||||
const user = await apiKeyToUser(apiKey);
|
||||
const websocketId = randomUUID();
|
||||
|
||||
graphqlLogger.addContext('websocketId', websocketId);
|
||||
graphqlLogger.debug('%s connected', user.name);
|
||||
graphqlLogger.removeContext('websocketId');
|
||||
|
||||
// Update ws connection count and other needed values
|
||||
wsHasConnected(websocketId);
|
||||
|
||||
return {
|
||||
user,
|
||||
websocketId,
|
||||
};
|
||||
},
|
||||
async onDisconnect(
|
||||
_,
|
||||
websocketContext: {
|
||||
initPromise: Promise<
|
||||
| boolean
|
||||
| {
|
||||
user: {
|
||||
name: string;
|
||||
};
|
||||
websocketId: string;
|
||||
}
|
||||
>;
|
||||
}
|
||||
) {
|
||||
const context = await websocketContext.initPromise;
|
||||
|
||||
// The websocket has disconnected before init event has resolved
|
||||
// @see: https://github.com/apollographql/subscriptions-transport-ws/issues/349
|
||||
if (context === true || context === false) {
|
||||
// This seems to also happen if a tab is left open and then a server starts up
|
||||
// The tab hits the server over and over again without sending init
|
||||
graphqlLogger.debug('unknown disconnected');
|
||||
return;
|
||||
}
|
||||
|
||||
const { user, websocketId } = context;
|
||||
|
||||
graphqlLogger.addContext('websocketId', websocketId);
|
||||
graphqlLogger.debug('%s disconnected.', user.name);
|
||||
graphqlLogger.removeContext('websocketId');
|
||||
|
||||
// Update ws connection count and other needed values
|
||||
wsHasDisconnected(websocketId);
|
||||
},
|
||||
},
|
||||
subTransWs
|
||||
);
|
||||
|
||||
const apolloServerPluginOnExit = {
|
||||
async serverWillStart() {
|
||||
return {
|
||||
/**
|
||||
* When the app exits this will be run.
|
||||
*/
|
||||
async drainServer() {
|
||||
// Close all connections to subscriptions server
|
||||
subscriptionsTransportServer.close();
|
||||
graphqlWsServer.dispose();
|
||||
},
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
// Create graphql instance
|
||||
const apolloServer = new ApolloServer({
|
||||
schema,
|
||||
plugins: [
|
||||
apolloServerPluginOnExit,
|
||||
ApolloServerPluginDrainHttpServer({ httpServer }),
|
||||
],
|
||||
introspection: GRAPHQL_INTROSPECTION,
|
||||
});
|
||||
|
||||
await apolloServer.start();
|
||||
|
||||
app.get('/graphql/api/logs', getLogs);
|
||||
|
||||
app.get(
|
||||
'/graphql/api/customizations/:type',
|
||||
async (req: Request, res: Response) => {
|
||||
// @TODO - Clean up this function
|
||||
const apiKey = req.headers['x-api-key'];
|
||||
if (
|
||||
apiKey &&
|
||||
typeof apiKey === 'string' &&
|
||||
(await apiKeyToUser(apiKey)).role !== 'guest'
|
||||
) {
|
||||
if (req.params.type === 'banner') {
|
||||
const path = await getBannerPathIfPresent();
|
||||
if (path) {
|
||||
res.sendFile(path);
|
||||
return;
|
||||
}
|
||||
} else if (req.params.type === 'case') {
|
||||
const path = await getCasePathIfPresent();
|
||||
if (path) {
|
||||
res.sendFile(path);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
return res
|
||||
.status(404)
|
||||
.send('no customization of this type found');
|
||||
}
|
||||
|
||||
return res.status(403).send('unauthorized');
|
||||
}
|
||||
);
|
||||
|
||||
app.use(
|
||||
'/graphql',
|
||||
cors(),
|
||||
json(),
|
||||
expressMiddleware(apolloServer, {
|
||||
context: async ({ req }) => {
|
||||
// Normal Websocket connection
|
||||
/* if (connection && Object.keys(connection.context).length >= 1) {
|
||||
// Check connection for metadata
|
||||
return {
|
||||
...connection.context,
|
||||
};
|
||||
} */
|
||||
|
||||
// Normal HTTP connection
|
||||
if (
|
||||
req &&
|
||||
req.headers['x-api-key'] &&
|
||||
typeof req.headers['x-api-key'] === 'string'
|
||||
) {
|
||||
const apiKey = req.headers['x-api-key'];
|
||||
const user = await apiKeyToUser(apiKey);
|
||||
|
||||
return {
|
||||
user,
|
||||
};
|
||||
}
|
||||
|
||||
throw new Error('Invalid API key');
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
httpServer.on('upgrade', (req, socket, head) => {
|
||||
// extract websocket subprotocol from header
|
||||
const protocol = req.headers['sec-websocket-protocol'];
|
||||
const protocols = Array.isArray(protocol)
|
||||
? protocol
|
||||
: protocol?.split(',').map((p) => p.trim());
|
||||
|
||||
// decide which websocket server to use
|
||||
const wss =
|
||||
protocols?.includes(GRAPHQL_WS) && // subscriptions-transport-ws subprotocol
|
||||
!protocols.includes(GRAPHQL_TRANSPORT_WS_PROTOCOL) // graphql-ws subprotocol
|
||||
? subTransWs
|
||||
: // graphql-ws will welcome its own subprotocol and
|
||||
// gracefully reject invalid ones. if the client supports
|
||||
// both transports, graphql-ws will prevail
|
||||
graphqlWs;
|
||||
wss.handleUpgrade(req, socket, head, (ws) => {
|
||||
wss.emit('connection', ws, req);
|
||||
});
|
||||
});
|
||||
|
||||
// List all endpoints at start of server
|
||||
app.get('/', (_, res: Response) => res.status(200).send('OK'));
|
||||
|
||||
app.post('/verify', async (req, res) => {
|
||||
try {
|
||||
// Check two-factor token is valid
|
||||
verifyTwoFactorToken(req.body?.username, req.body?.token);
|
||||
|
||||
// Success
|
||||
logger.debug('2FA token valid, allowing login.');
|
||||
|
||||
// Allow the user to pass
|
||||
res.sendStatus(204);
|
||||
return;
|
||||
} catch (error: unknown) {
|
||||
logger.addContext('error', error);
|
||||
logger.error('Failed validating 2FA token.');
|
||||
logger.removeContext('error');
|
||||
|
||||
// User failed verification
|
||||
res.status(401);
|
||||
res.send((error as Error).message);
|
||||
}
|
||||
});
|
||||
|
||||
// Handle errors by logging them and returning a 500.
|
||||
app.use(
|
||||
(
|
||||
error: Error & { stackTrace?: string; status?: number },
|
||||
_,
|
||||
res: Response,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
__
|
||||
) => {
|
||||
// Don't log CORS errors
|
||||
if (error.message.includes('CORS')) return;
|
||||
|
||||
logger.error(error);
|
||||
|
||||
if (error.stack) {
|
||||
error.stackTrace = error.stack;
|
||||
}
|
||||
|
||||
res.status(error.status ?? 500).send(error);
|
||||
}
|
||||
);
|
||||
|
||||
httpServer.listen(PORT);
|
||||
return apolloServer;
|
||||
};
|
||||
62
api/src/store/actions/setup-remote-access.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
import {
|
||||
type SetupRemoteAccessInput,
|
||||
WAN_ACCESS_TYPE,
|
||||
WAN_FORWARD_TYPE,
|
||||
} from '@app/graphql/generated/api/types';
|
||||
import { DynamicRemoteAccessType } from '@app/remoteAccess/types';
|
||||
import { type AppDispatch, type RootState } from '@app/store/index';
|
||||
import { type MyServersConfig } from '@app/types/my-servers-config';
|
||||
import { createAsyncThunk } from '@reduxjs/toolkit';
|
||||
|
||||
const getDynamicRemoteAccessType = (
|
||||
accessType: WAN_ACCESS_TYPE,
|
||||
forwardType?: WAN_FORWARD_TYPE | undefined | null
|
||||
): DynamicRemoteAccessType => {
|
||||
// If access is disabled or always, DRA is disabled
|
||||
if (
|
||||
accessType === WAN_ACCESS_TYPE.DISABLED ||
|
||||
accessType === WAN_ACCESS_TYPE.ALWAYS
|
||||
) {
|
||||
return DynamicRemoteAccessType.DISABLED;
|
||||
}
|
||||
// if access is enabled and forward type is UPNP, DRA is UPNP, otherwise it is static
|
||||
return forwardType === WAN_FORWARD_TYPE.UPNP
|
||||
? DynamicRemoteAccessType.UPNP
|
||||
: DynamicRemoteAccessType.STATIC;
|
||||
};
|
||||
|
||||
export const setupRemoteAccessThunk = createAsyncThunk<
|
||||
Pick<
|
||||
MyServersConfig['remote'],
|
||||
'wanaccess' | 'wanport' | 'dynamicRemoteAccessType' | 'upnpEnabled'
|
||||
>,
|
||||
SetupRemoteAccessInput,
|
||||
{ state: RootState; dispatch: AppDispatch }
|
||||
>('config/setupRemoteAccess', async (payload) => {
|
||||
|
||||
if (payload.accessType === WAN_ACCESS_TYPE.DISABLED) {
|
||||
return {
|
||||
wanaccess: 'no',
|
||||
wanport: '',
|
||||
dynamicRemoteAccessType: DynamicRemoteAccessType.DISABLED,
|
||||
upnpEnabled: 'no',
|
||||
}
|
||||
}
|
||||
|
||||
if (payload.forwardType === WAN_FORWARD_TYPE.STATIC && !payload.port) {
|
||||
throw new Error('Missing port for WAN forward type STATIC');
|
||||
}
|
||||
|
||||
return {
|
||||
wanaccess: payload.accessType === WAN_ACCESS_TYPE.ALWAYS ? 'yes' : 'no',
|
||||
wanport:
|
||||
payload.forwardType === WAN_FORWARD_TYPE.STATIC
|
||||
? String(payload.port)
|
||||
: '',
|
||||
dynamicRemoteAccessType: getDynamicRemoteAccessType(
|
||||
payload.accessType,
|
||||
payload.forwardType
|
||||
),
|
||||
upnpEnabled: payload.forwardType === WAN_FORWARD_TYPE.UPNP ? 'yes' : 'no',
|
||||
};
|
||||
});
|
||||
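The thunk maps a SetupRemoteAccessInput onto the flat my-servers config fields. A usage sketch from a resolver or CLI handler; it assumes WAN_ACCESS_TYPE also carries a DYNAMIC member, which this hunk does not show:

import { store } from '@app/store';
import { setupRemoteAccessThunk } from '@app/store/actions/setup-remote-access';
import { WAN_ACCESS_TYPE, WAN_FORWARD_TYPE } from '@app/graphql/generated/api/types';

// Enable dynamic remote access over UPNP; no static port is needed in this mode.
const result = await store.dispatch(
    setupRemoteAccessThunk({
        accessType: WAN_ACCESS_TYPE.DYNAMIC, // assumption: enum member exists
        forwardType: WAN_FORWARD_TYPE.UPNP,
    })
);
// On success the payload is roughly:
// { wanaccess: 'no', wanport: '', dynamicRemoteAccessType: 'UPNP', upnpEnabled: 'yes' }
console.log(result);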
51
api/src/store/listeners/array-event-listener.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import { logger } from '@app/core/log';
|
||||
import { getArrayData } from '@app/core/modules/array/get-array-data';
|
||||
import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub';
|
||||
import { startAppListening } from '@app/store/listeners/listener-middleware';
|
||||
import { loadSingleStateFile } from '@app/store/modules/emhttp';
|
||||
import { StateFileKey } from '@app/store/types';
|
||||
import { isAnyOf } from '@reduxjs/toolkit';
|
||||
import { isEqual } from 'lodash';
|
||||
|
||||
export const enableArrayEventListener = () =>
|
||||
startAppListening({
|
||||
matcher: isAnyOf(loadSingleStateFile.fulfilled),
|
||||
async effect(
|
||||
action,
|
||||
{ getState, getOriginalState, delay, unsubscribe, subscribe }
|
||||
) {
|
||||
if (loadSingleStateFile.fulfilled.match(action)) {
|
||||
if (action.meta.arg === StateFileKey.disks) {
|
||||
unsubscribe();
|
||||
// getOriginalState must be called BEFORE the awaited delay in this function
|
||||
const oldArrayData = getArrayData(getOriginalState);
|
||||
await delay(5_000);
|
||||
const array = getArrayData(getState);
|
||||
if (!isEqual(oldArrayData, array)) {
|
||||
pubsub.publish(PUBSUB_CHANNEL.ARRAY, { array });
|
||||
logger.debug(
|
||||
{ event: array },
|
||||
'Array was updated, publishing event'
|
||||
);
|
||||
}
|
||||
|
||||
subscribe();
|
||||
} else if (action.meta.arg === StateFileKey.var) {
|
||||
if (
|
||||
!isEqual(
|
||||
getOriginalState().emhttp.var?.name,
|
||||
getState().emhttp.var?.name
|
||||
)
|
||||
) {
|
||||
await pubsub.publish(PUBSUB_CHANNEL.INFO, {
|
||||
info: {
|
||||
os: {
|
||||
hostname: getState().emhttp.var?.name,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
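The unsubscribe / delay / subscribe sequence above is the listener-middleware idiom for coalescing bursts of state-file reloads into a single publish. Stripped to its essentials (generic Redux Toolkit listener middleware, illustrative only):

import { createAction, createListenerMiddleware } from '@reduxjs/toolkit';

// Hypothetical action standing in for loadSingleStateFile.fulfilled.
const stateFileLoaded = createAction<string>('emhttp/stateFileLoaded');

const listenerMiddleware = createListenerMiddleware();

listenerMiddleware.startListening({
    actionCreator: stateFileLoaded,
    async effect(_action, api) {
        api.unsubscribe();      // ignore further matches while we wait
        await api.delay(5_000); // let rapid-fire reloads settle
        // ...compare api.getOriginalState() with api.getState() and publish once...
        api.subscribe();        // resume listening
    },
});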
77
api/src/store/listeners/config-listener.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { startAppListening } from '@app/store/listeners/listener-middleware';
|
||||
import { getDiff } from 'json-difference';
|
||||
import { isEqual } from 'lodash';
|
||||
import { logger } from '@app/core/log';
|
||||
import {
|
||||
type ConfigType,
|
||||
getWriteableConfig,
|
||||
} from '@app/core/utils/files/config-file-normalizer';
|
||||
import {
|
||||
loadConfigFile,
|
||||
loginUser,
|
||||
logoutUser,
|
||||
} from '@app/store/modules/config';
|
||||
import { FileLoadStatus } from '@app/store/types';
|
||||
import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer';
|
||||
import { isFulfilled } from '@reduxjs/toolkit';
|
||||
import { environment } from '@app/environment';
|
||||
import { writeFileSync } from 'fs';
|
||||
|
||||
const actionIsLoginOrLogout = isFulfilled(logoutUser, loginUser);
|
||||
|
||||
export const enableConfigFileListener = (mode: ConfigType) => () =>
|
||||
startAppListening({
|
||||
predicate(action, currentState, previousState) {
|
||||
if (!environment.IS_MAIN_PROCESS) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (currentState.config.status === FileLoadStatus.LOADED) {
|
||||
const oldFlashConfig = previousState?.config.api.version
|
||||
? getWriteableConfig(previousState.config, mode)
|
||||
: null;
|
||||
const newFlashConfig = getWriteableConfig(
|
||||
currentState.config,
|
||||
mode
|
||||
);
|
||||
|
||||
if (
|
||||
!isEqual(oldFlashConfig, newFlashConfig) &&
|
||||
action.type !== loadConfigFile.fulfilled.type &&
|
||||
action.type !== loadConfigFile.rejected.type
|
||||
) {
|
||||
logger.trace(
|
||||
{
|
||||
diff: getDiff(oldFlashConfig ?? {}, newFlashConfig),
|
||||
},
|
||||
`${mode} Config Changed!`,
|
||||
'Action:',
|
||||
action.type
|
||||
);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
if (actionIsLoginOrLogout(action) && mode === 'memory') {
|
||||
logger.trace(
|
||||
'Logout / Login Action Encountered, writing memory config'
|
||||
);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
async effect(_, { getState }) {
|
||||
const { paths, config } = getState();
|
||||
const pathToWrite =
|
||||
mode === 'flash'
|
||||
? paths['myservers-config']
|
||||
: paths['myservers-config-states'];
|
||||
const writeableConfig = getWriteableConfig(config, mode);
|
||||
const serializedConfig =
|
||||
safelySerializeObjectToIni(writeableConfig);
|
||||
logger.debug('Writing updated config to %s', pathToWrite);
|
||||
writeFileSync(pathToWrite, serializedConfig);
|
||||
},
|
||||
});
|
||||
40
api/src/store/listeners/server-state-listener.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { mothershipLogger } from '@app/core/log';
|
||||
import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub';
|
||||
import { getServers } from '@app/graphql/schema/utils';
|
||||
import { isAPIStateDataFullyLoaded } from '@app/mothership/graphql-client';
|
||||
import { startAppListening } from '@app/store/listeners/listener-middleware';
|
||||
import { FileLoadStatus } from '@app/store/types';
|
||||
|
||||
import isEqual from 'lodash/isEqual';
|
||||
|
||||
export const enableServerStateListener = () =>
|
||||
startAppListening({
|
||||
predicate: (_, currState, prevState) => {
|
||||
if (currState.config.status === FileLoadStatus.LOADED && currState.emhttp.status === FileLoadStatus.LOADED ) {
|
||||
if (prevState.minigraph.status !== currState.minigraph.status || !isEqual(prevState.config.remote, currState.config.remote)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
},
|
||||
async effect(_, { getState }) {
|
||||
if (isAPIStateDataFullyLoaded(getState())) {
|
||||
const servers = getServers(getState);
|
||||
mothershipLogger.trace(
|
||||
'Got local server state',
|
||||
servers
|
||||
);
|
||||
if (servers.length > 0) {
|
||||
// Publish owner event
|
||||
await pubsub.publish(PUBSUB_CHANNEL.OWNER, {
|
||||
owner: servers[0].owner,
|
||||
});
|
||||
|
||||
// Publish servers event
|
||||
await pubsub.publish(PUBSUB_CHANNEL.SERVERS, {
|
||||
servers: servers,
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
45
api/src/store/listeners/upnp-listener.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { startAppListening } from '@app/store/listeners/listener-middleware';
|
||||
import { type RootState } from '@app/store';
|
||||
import { disableUpnp, enableUpnp } from '@app/store/modules/upnp';
|
||||
import { upnpLogger } from '@app/core/log';
|
||||
import { loadConfigFile } from '@app/store/modules/config';
|
||||
import { loadSingleStateFile, loadStateFiles } from '@app/store/modules/emhttp';
|
||||
import { FileLoadStatus } from '@app/store/types';
|
||||
import { isAnyOf } from '@reduxjs/toolkit';
|
||||
import { setupRemoteAccessThunk } from '@app/store/actions/setup-remote-access';
|
||||
|
||||
const shouldUpnpBeEnabled = (state: RootState | null): boolean => {
|
||||
if (state?.config.status !== FileLoadStatus.LOADED || state?.emhttp.status !== FileLoadStatus.LOADED) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const { useUpnp } = state.emhttp.var;
|
||||
const { upnpEnabled, wanaccess } = state.config.remote;
|
||||
|
||||
return useUpnp && upnpEnabled === 'yes' && wanaccess === 'yes';
|
||||
};
|
||||
|
||||
const isStateOrConfigUpdate = isAnyOf(loadConfigFile.fulfilled, loadSingleStateFile.fulfilled, loadStateFiles.fulfilled, setupRemoteAccessThunk.fulfilled);
|
||||
|
||||
export const enableUpnpListener = () => startAppListening({
|
||||
predicate(action, currentState, previousState) {
|
||||
// @TODO: One of our actions is incorrectly configured. Sometimes the action is an anonymous function. We need to fix this.
|
||||
if ((isStateOrConfigUpdate(action) || !action?.type)
|
||||
&& (shouldUpnpBeEnabled(currentState) !== shouldUpnpBeEnabled(previousState))) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}, async effect(_, { getState, dispatch }) {
|
||||
const state = getState();
|
||||
const { config: { remote: { wanport } }, emhttp: { var: { portssl } } } = getState();
|
||||
upnpLogger.info('UPNP Enabled: (%s) Wan Port: [%s]', shouldUpnpBeEnabled(state), wanport === '' ? 'Will Generate New WAN Port' : wanport);
|
||||
|
||||
if (shouldUpnpBeEnabled(state)) {
|
||||
await dispatch(enableUpnp({ wanport, portssl }));
|
||||
} else {
|
||||
await dispatch(disableUpnp());
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
@@ -21,9 +21,10 @@ import { setGraphqlConnectionStatus } from '@app/store/actions/set-minigraph-sta
|
||||
import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer';
|
||||
import { writeFileSync } from 'fs';
|
||||
import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer';
|
||||
import { pubsub } from '@app/core/pubsub';
|
||||
import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub';
|
||||
import { DynamicRemoteAccessType } from '@app/remoteAccess/types';
|
||||
import { isEqual } from 'lodash';
|
||||
import { setupRemoteAccessThunk } from '@app/store/actions/setup-remote-access';
|
||||
|
||||
export type SliceState = {
|
||||
status: FileLoadStatus;
|
||||
@@ -79,7 +80,7 @@ export const loginUser = createAsyncThunk<
|
||||
username: userInfo.username,
|
||||
avatar: userInfo.avatar,
|
||||
};
|
||||
await pubsub.publish('owner', { owner });
|
||||
await pubsub.publish(PUBSUB_CHANNEL.OWNER, { owner });
|
||||
return userInfo;
|
||||
});
|
||||
|
||||
@@ -92,7 +93,7 @@ export const logoutUser = createAsyncThunk<
|
||||
const { pubsub } = await import('@app/core/pubsub');
|
||||
|
||||
// Publish to servers endpoint
|
||||
await pubsub.publish('servers', {
|
||||
await pubsub.publish(PUBSUB_CHANNEL.SERVERS, {
|
||||
servers: [],
|
||||
});
|
||||
|
||||
@@ -102,7 +103,7 @@ export const logoutUser = createAsyncThunk<
|
||||
avatar: '',
|
||||
};
|
||||
// Publish to owner endpoint
|
||||
await pubsub.publish('owner', { owner });
|
||||
await pubsub.publish(PUBSUB_CHANNEL.OWNER, { owner });
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -325,6 +326,14 @@ export const config = createSlice({
|
||||
builder.addCase(setGraphqlConnectionStatus, (state, action) => {
|
||||
state.connectionStatus.minigraph = action.payload.status;
|
||||
});
|
||||
|
||||
builder.addCase(setupRemoteAccessThunk.fulfilled, (state, action) => {
|
||||
state.remote.wanaccess = action.payload.wanaccess;
|
||||
state.remote.dynamicRemoteAccessType =
|
||||
action.payload.dynamicRemoteAccessType;
|
||||
state.remote.wanport = action.payload.wanport;
|
||||
state.remote.upnpEnabled = action.payload.upnpEnabled;
|
||||
});
|
||||
},
|
||||
});
|
||||
const { actions, reducer } = config;
|
||||
|
||||
@@ -18,9 +18,6 @@ export const startStoreSync = async () => {
    const state = store.getState();
    // Config dependent options, wait until config loads to execute
    if (state.config.status === FileLoadStatus.LOADED) {
        // Update 2FA
        // await sync2FA();

        // Update registration
        await syncRegistration(lastState);
|
||||
|
||||
|
||||
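The rest of startStoreSync is elided by this hunk. A rough sketch of how handlers with the StoreSubscriptionHandler signature are typically driven from store.subscribe, assuming lastState holds the previous snapshot; the repo's actual wiring may differ:

import { store } from '@app/store';
import { syncRegistration } from '@app/store/sync/registration-sync';
import { syncInfoApps } from '@app/store/sync/info-apps-sync';

let lastState: ReturnType<typeof store.getState> | null = null;

store.subscribe(async () => {
    // Handlers read the current state themselves and receive the previous snapshot.
    await syncRegistration(lastState);
    await syncInfoApps(lastState);
    lastState = store.getState();
});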
44
api/src/store/sync/info-apps-sync.ts
Normal file
@@ -0,0 +1,44 @@
import { logger } from '@app/core/log';
import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub';
import { store } from '@app/store';
import { DaemonConnectionStatus, type StoreSubscriptionHandler } from '@app/store/types';
import { isEqual } from 'lodash';

type InfoAppsEvent = {
    info: {
        apps: {
            installed: number | null;
            running: number | null;
        };
    };
};

export const createInfoAppsEvent = (state: Parameters<StoreSubscriptionHandler>[0]): InfoAppsEvent | null => {
    // Docker state isn't loaded
    if (state === null || state.docker.status === DaemonConnectionStatus.DISCONNECTED) return null;

    return {
        info: {
            apps: {
                installed: state?.docker.installed,
                running: state?.docker.running,
            },
        },
    };
};

export const syncInfoApps: StoreSubscriptionHandler = async lastState => {
    const lastEvent = createInfoAppsEvent(lastState);
    const currentEvent = createInfoAppsEvent(store.getState());

    // Skip if either event resolved to null
    if (lastEvent === null || currentEvent === null) return;

    // Skip this if it's the same as the last one
    if (isEqual(lastEvent, currentEvent)) return;

    logger.debug('Docker container count was updated, publishing event');

    // Publish to graphql
    await pubsub.publish(PUBSUB_CHANNEL.INFO, currentEvent);
};
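On the consuming side (not shown here), a GraphQL subscription would typically return an async iterator for the same channel. A minimal sketch; the subscription field name is an assumption:

import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub';

// Hypothetical resolver map entry feeding subscribers from the INFO channel published above.
export const infoSubscription = {
    Subscription: {
        info: {
            subscribe: () => pubsub.asyncIterator(PUBSUB_CHANNEL.INFO),
        },
    },
};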
62
api/src/store/sync/registration-sync.ts
Normal file
@@ -0,0 +1,62 @@
import { logger } from '@app/core/log';
import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub';
import { store } from '@app/store';
import { FileLoadStatus, type StoreSubscriptionHandler } from '@app/store/types';
import isEqual from 'lodash/isEqual';

export type RegistrationEvent = {
    registration: {
        guid: string;
        type: string;
        state: string;
        keyFile: {
            location: string;
            contents: null;
        };
    };
};

export const createRegistrationEvent = (state: Parameters<StoreSubscriptionHandler>[0]): RegistrationEvent | null => {
    // Var state isn't loaded
    if (state === null || Object.keys(state.emhttp.var).length === 0) return null;

    const event = {
        registration: {
            guid: state.emhttp.var.regGuid,
            type: state.emhttp.var.regTy.toUpperCase(),
            state: state.emhttp.var.regState,
            keyFile: {
                location: state.emhttp.var.regFile,
                contents: state.registration.keyFile,
            },
        },
    };

    return event;
};

export const syncRegistration: StoreSubscriptionHandler = async lastState => {
    try {
        // Skip until we have the key and emhttp states loaded
        const { registration, emhttp } = store.getState();
        if (registration.status !== FileLoadStatus.LOADED) return;
        if (emhttp.status !== FileLoadStatus.LOADED) return;

        const lastEvent = createRegistrationEvent(lastState);
        const currentEvent = createRegistrationEvent(store.getState());

        // Skip if either event resolved to null
        if (lastEvent === null || currentEvent === null) return;

        // Skip this if it's the same as the last one
        if (isEqual(lastEvent, currentEvent)) return;

        logger.debug('Registration was updated, publishing event');

        // Publish to graphql
        await pubsub.publish(PUBSUB_CHANNEL.REGISTRATION, currentEvent);
    } catch (error: unknown) {
        if (!(error instanceof Error)) throw new Error(`Failed publishing registration event with unknown error "${String(error)}"`);
        logger.error('Failed publishing registration event with "%s"', error.message);
    }
};
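A quick Jest-style check of the guard clause above; sketch only, the repo's actual test setup may differ:

import { createRegistrationEvent } from '@app/store/sync/registration-sync';

describe('createRegistrationEvent', () => {
    it('returns null until the var state is loaded', () => {
        // Passing null mirrors the "var state isn't loaded" branch.
        expect(createRegistrationEvent(null)).toBeNull();
    });
});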
@@ -52,9 +52,7 @@ export const setupDockerWatch = async (): Promise<DockerEE> => {
        if (!watchedActions.includes(data.Action)) {
            return;
        }
        dockerLogger.addContext('data', data);
        dockerLogger.debug(`[${data.from}] ${data.Type}->${data.Action}`);
        dockerLogger.removeContext('data');
        await debouncedContainerCacheUpdate();
    }
);
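debouncedContainerCacheUpdate is referenced but not defined in this hunk. One plausible shape, assuming lodash's debounce; the refresh function and the 500 ms window are illustrative, not taken from the diff:

import { debounce } from 'lodash';

// Hypothetical refresh routine; only the debounced call site appears above.
const updateContainerCache = async (): Promise<void> => {
    // e.g. re-list containers via dockerode and write the result into the docker slice
};

// Collapse bursts of Docker events into a single cache refresh.
const debouncedContainerCacheUpdate = debounce(() => {
    void updateContainerCache();
}, 500);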
40
api/src/unraid-api/app/app.module.ts
Normal file
@@ -0,0 +1,40 @@
import { apiLogger } from '@app/core/log';
import { setupPermissions } from '@app/core/permissions';
import { GraphqlAuthGuard } from '@app/unraid-api/auth/auth.guard';
import { AuthModule } from '@app/unraid-api/auth/auth.module';
import { GraphModule } from '@app/unraid-api/graph/graph.module';
import { RestModule } from '@app/unraid-api/rest/rest.module';
import { Module } from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { ACGuard, AccessControlModule } from 'nest-access-control';
import { LoggerModule } from 'nestjs-pino';

@Module({
    imports: [
        LoggerModule.forRoot({
            pinoHttp: {
                logger: apiLogger,
                autoLogging: false
            },
        }),
        AccessControlModule.forRoles(setupPermissions()),
        AuthModule,
        GraphModule,
        RestModule,
    ],
    controllers: [],
    providers: [
        {
            provide: 'APP_GUARD',
            useFactory: () =>
                new GraphqlAuthGuard(
                    new Reflector(),
                ),
        },
        {
            provide: 'APP_GUARD',
            useClass: ACGuard,
        },
    ],
})
export class AppModule {}
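The bootstrap file is not included in this excerpt. Given the commit's note about listening on a socket as well as ports, a minimal NestJS bootstrap for this module would look roughly like the following; the port value and environment variable are assumptions:

import { NestFactory } from '@nestjs/core';
import { AppModule } from '@app/unraid-api/app/app.module';

async function bootstrap() {
    const app = await NestFactory.create(AppModule);
    // listen() accepts either a TCP port or a unix socket path.
    await app.listen(process.env.PORT ?? 4000);
}

void bootstrap();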
80
api/src/unraid-api/auth/auth.guard.ts
Normal file
@@ -0,0 +1,80 @@
import { apiLogger } from '@app/core/log';
import { BYPASS_PERMISSION_CHECKS } from '@app/environment';
import { ServerHeaderStrategy } from '@app/unraid-api/auth/header.strategy';
import { IS_PUBLIC_KEY } from '@app/unraid-api/auth/public.decorator';
import {
    type ExecutionContext,
    Injectable,
    type CanActivate,
    UnauthorizedException,
} from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { GqlExecutionContext, type GqlContextType } from '@nestjs/graphql';
import { AuthGuard } from '@nestjs/passport';
import { type Observable } from 'rxjs';

@Injectable()
export class GraphqlAuthGuard
    extends AuthGuard([ServerHeaderStrategy.key])
    implements CanActivate
{
    constructor(private readonly reflector: Reflector) {
        super();
    }

    handleRequest<UserAccount>(err, user: UserAccount | null, info, context) {
        if (!user) {
            if (context) {
                const ctx = GqlExecutionContext.create(context);
                const fullContext = ctx.getContext();
                apiLogger.error(
                    'No user found in request - connection params: %o',
                    fullContext.connectionParams ?? {}
                );
            }

            throw new UnauthorizedException('User not found');
        }

        return user;
    }

    /**
     * Helper to determine whether this guard should activate. If the route is marked as public, it will not run.
     * @param context
     * @returns
     */
    canActivate(
        context: ExecutionContext
    ): boolean | Promise<boolean> | Observable<boolean> {
        const isPublic = this.reflector.getAllAndOverride<boolean>(
            IS_PUBLIC_KEY,
            [context.getHandler(), context.getClass()]
        );

        if (isPublic) {
            return true;
        }

        return super.canActivate(context);
    }

    getRequest(context: ExecutionContext) {
        if (context.getType<GqlContextType>() === 'graphql') {
            // headers are either inside context.getContext().connectionParams or in the request, which is in context.getContext().req (see context.ts)
            const ctx = GqlExecutionContext.create(context);
            const fullContext = ctx.getContext<any>();
            const request = fullContext.req ?? {};
            const additionalConnectionParamHeaders =
                fullContext.connectionParams ?? {};
            request.headers = {
                ...(request.headers ?? {}),
                ...additionalConnectionParamHeaders,
            };

            return request;
        } else {
            return context.switchToHttp().getRequest();
        }
    }
}
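The IS_PUBLIC_KEY metadata read in canActivate comes from @app/unraid-api/auth/public.decorator, which is not shown above. It typically follows the standard NestJS pattern:

import { SetMetadata } from '@nestjs/common';

// Marks a handler or resolver as public so GraphqlAuthGuard skips authentication for it.
export const IS_PUBLIC_KEY = 'isPublic';
export const Public = () => SetMetadata(IS_PUBLIC_KEY, true);

A resolver method decorated with @Public() would then bypass the guard entirely.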
11
api/src/unraid-api/auth/auth.module.ts
Normal file
@@ -0,0 +1,11 @@
import { Module } from '@nestjs/common';
import { AuthService } from './auth.service';
import { UsersModule } from '@app/unraid-api/users/users.module';
import { PassportModule } from '@nestjs/passport';
import { ServerHeaderStrategy } from '@app/unraid-api/auth/header.strategy';

@Module({
    imports: [UsersModule, PassportModule],
    providers: [AuthService, ServerHeaderStrategy],
})
export class AuthModule {}
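UsersModule is imported above but not part of this excerpt. Assuming the usual NestJS layout, it would provide and export UsersService so AuthService can inject it:

import { Module } from '@nestjs/common';
import { UsersService } from '@app/unraid-api/users/users.service';

// Hypothetical module definition; the real file is not shown in this diff.
@Module({
    providers: [UsersService],
    exports: [UsersService],
})
export class UsersModule {}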
18
api/src/unraid-api/auth/auth.service.spec.ts
Normal file
@@ -0,0 +1,18 @@
import { Test, TestingModule } from '@nestjs/testing';
import { AuthService } from './auth.service';

describe('AuthService', () => {
    let service: AuthService;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [AuthService],
        }).compile();

        service = module.get<AuthService>(AuthService);
    });

    it('should be defined', () => {
        expect(service).toBeDefined();
    });
});
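As written, the testing module registers only AuthService, so Nest will likely fail to resolve the UsersService dependency that AuthService injects (see the service below). A common remedy is to register a stub provider; sketch, not from the diff:

import { Test, type TestingModule } from '@nestjs/testing';
import { AuthService } from './auth.service';
import { UsersService } from '@app/unraid-api/users/users.service';

describe('AuthService (with stubbed UsersService)', () => {
    let service: AuthService;

    beforeEach(async () => {
        const module: TestingModule = await Test.createTestingModule({
            providers: [
                AuthService,
                // Stub the user lookup so the service can be constructed in isolation.
                { provide: UsersService, useValue: { findOne: () => null } },
            ],
        }).compile();

        service = module.get<AuthService>(AuthService);
    });

    it('should be defined', () => {
        expect(service).toBeDefined();
    });
});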
17
api/src/unraid-api/auth/auth.service.ts
Normal file
@@ -0,0 +1,17 @@
import { type UserAccount } from '@app/graphql/generated/api/types';
import { UsersService } from '@app/unraid-api/users/users.service';
import { Injectable, UnauthorizedException } from '@nestjs/common';

@Injectable()
export class AuthService {
    constructor(private usersService: UsersService) {}

    async validateUser(apiKey: string): Promise<UserAccount> {
        const user = this.usersService.findOne(apiKey);
        if (user) {
            return user;
        }
        throw new UnauthorizedException('Invalid API key');
    }
}
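UsersService.findOne, called above, is not included in this excerpt. A hypothetical sketch of its shape, assuming it maps known API keys to UserAccount objects; the real lookup presumably resolves keys from the server's configuration:

import { type UserAccount } from '@app/graphql/generated/api/types';
import { Injectable } from '@nestjs/common';

@Injectable()
export class UsersService {
    // Hypothetical in-memory table; the real service would derive accounts from config.
    private readonly accounts = new Map<string, UserAccount>();

    findOne(apiKey: string): UserAccount | null {
        return this.accounts.get(apiKey) ?? null;
    }
}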