Mirror of https://github.com/unraid/api.git (synced 2026-01-02 14:40:01 -06:00)

Compare commits: fix/back-t ... v3.1.0 (5 commits)
| Author | SHA1 | Date |
|---|---|---|
| | a91732959b | |
| | 2011cf453d | |
| | f9d656fc0d | |
| | 0a9b047170 | |
| | abd66d6082 | |
3 .github/workflows/main.yml vendored

@@ -107,7 +107,6 @@ jobs:
      run:
        working-directory: api
    runs-on: ubuntu-latest
    needs: [lint-api, test-api]

    outputs:
      API_VERSION: ${{ steps.build-pack-binary.outputs.API_VERSION }}
@@ -162,11 +161,11 @@ jobs:
          path: ${{ github.workspace }}/api/deploy/release/*.tgz

  build-plugin:
    needs: [lint-api, test-api, build-api]
    defaults:
      run:
        working-directory: plugin
    runs-on: ubuntu-latest
    needs: [build-api]
    steps:
      - name: Set Timezone
        uses: szenius/set-timezone@v1.2
3 .github/workflows/pull-request.yml vendored

@@ -92,7 +92,6 @@ jobs:
      run:
        working-directory: api
    runs-on: ubuntu-latest
    needs: [lint-api, test-api]

    outputs:
      API_VERSION: ${{ steps.build-pack-binary.outputs.API_VERSION }}
@@ -158,7 +157,7 @@ jobs:
      run:
        working-directory: plugin
    runs-on: ubuntu-latest
    needs: [build-api]
    needs: [lint-api, test-api, build-api]
    steps:
      - name: Set Timezone
        uses: szenius/set-timezone@v1.2
@@ -1,5 +1,5 @@
[api]
version="3.0.1+b26ff388"
version="3.1.0"
[local]
[notifier]
apikey="unnotify_30994bfaccf839c65bae75f7fa12dd5ee16e69389f754c3b98ed7d5"

@@ -1,5 +1,5 @@
[api]
version="3.0.1+b26ff388"
version="3.1.0"
[local]
[notifier]
apikey="unnotify_30994bfaccf839c65bae75f7fa12dd5ee16e69389f754c3b98ed7d5"
54 api/package-lock.json generated

@@ -16,7 +16,6 @@
        "@graphql-tools/merge": "^8.4.0",
        "@graphql-tools/schema": "^9.0.17",
        "@graphql-tools/utils": "^9.2.1",
        "@gridplus/docker-events": "github:unraid/docker-events",
        "@reduxjs/toolkit": "^1.9.5",
        "@reflet/cron": "^1.3.1",
        "@runonflux/nat-upnp": "^1.0.2",
@@ -35,6 +34,7 @@
        "convert": "^4.10.0",
        "cors": "^2.8.5",
        "cross-fetch": "^3.1.5",
        "docker-event-emitter": "^0.3.0",
        "dockerode": "^3.3.5",
        "dotenv": "^16.0.3",
        "express": "^4.18.2",
@@ -2857,22 +2857,6 @@
        "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0"
      }
    },
    "node_modules/@gridplus/docker-events": {
      "version": "1.0.0",
      "resolved": "git+ssh://git@github.com/unraid/docker-events.git#5eb1e71044d9f60e8227b1022bdd99139c82a57f",
      "license": "ISC",
      "dependencies": {
        "debug": "^3.1.0"
      }
    },
    "node_modules/@gridplus/docker-events/node_modules/debug": {
      "version": "3.2.7",
      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
      "dependencies": {
        "ms": "^2.1.1"
      }
    },
    "node_modules/@humanwhocodes/config-array": {
      "version": "0.11.8",
      "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz",
@@ -6790,6 +6774,17 @@
        "node": ">=8"
      }
    },
    "node_modules/docker-event-emitter": {
      "version": "0.3.0",
      "resolved": "https://registry.npmjs.org/docker-event-emitter/-/docker-event-emitter-0.3.0.tgz",
      "integrity": "sha512-QWpJsTOcLOiOctbCTH3T+w34Aw+zK6JzTh8xOqD/5/dDEhPhnCFmR8VzsCvTYAlTmkgxMUkRMTlBz1sGNZB5vg==",
      "dependencies": {
        "debug": "^4.1.1"
      },
      "peerDependencies": {
        "dockerode": "^3.0.2"
      }
    },
    "node_modules/docker-modem": {
      "version": "3.0.8",
      "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-3.0.8.tgz",
@@ -18249,23 +18244,6 @@
      "integrity": "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==",
      "requires": {}
    },
    "@gridplus/docker-events": {
      "version": "git+ssh://git@github.com/unraid/docker-events.git#5eb1e71044d9f60e8227b1022bdd99139c82a57f",
      "from": "@gridplus/docker-events@github:unraid/docker-events",
      "requires": {
        "debug": "^3.1.0"
      },
      "dependencies": {
        "debug": {
          "version": "3.2.7",
          "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
          "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
          "requires": {
            "ms": "^2.1.1"
          }
        }
      }
    },
    "@humanwhocodes/config-array": {
      "version": "0.11.8",
      "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz",
@@ -21224,6 +21202,14 @@
        }
      }
    },
    "docker-event-emitter": {
      "version": "0.3.0",
      "resolved": "https://registry.npmjs.org/docker-event-emitter/-/docker-event-emitter-0.3.0.tgz",
      "integrity": "sha512-QWpJsTOcLOiOctbCTH3T+w34Aw+zK6JzTh8xOqD/5/dDEhPhnCFmR8VzsCvTYAlTmkgxMUkRMTlBz1sGNZB5vg==",
      "requires": {
        "debug": "^4.1.1"
      }
    },
    "docker-modem": {
      "version": "3.0.8",
      "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-3.0.8.tgz",
@@ -64,7 +64,6 @@
    "@graphql-tools/merge": "^8.4.0",
    "@graphql-tools/schema": "^9.0.17",
    "@graphql-tools/utils": "^9.2.1",
    "@gridplus/docker-events": "github:unraid/docker-events",
    "@reduxjs/toolkit": "^1.9.5",
    "@reflet/cron": "^1.3.1",
    "@runonflux/nat-upnp": "^1.0.2",
@@ -83,6 +82,7 @@
    "convert": "^4.10.0",
    "cors": "^2.8.5",
    "cross-fetch": "^3.1.5",
    "docker-event-emitter": "^0.3.0",
    "dockerode": "^3.3.5",
    "dotenv": "^16.0.3",
    "express": "^4.18.2",
@@ -0,0 +1,23 @@
import { getBannerPathIfPresent, getCasePathIfPresent } from "@app/core/utils/images/image-file-helpers";
import { store } from "@app/store/index";
import { loadDynamixConfigFile } from "@app/store/modules/dynamix";

import { expect, test } from "vitest";

test('get case path returns expected result', () => {
    expect(getCasePathIfPresent()).resolves.toContain('/dev/dynamix/case-model.png')
})

test('get banner path returns null (state unloaded)', () => {
    expect(getBannerPathIfPresent()).resolves.toMatchInlineSnapshot('null')
})

test('get banner path returns the banner (state loaded)', async () => {
    await store.dispatch(loadDynamixConfigFile()).unwrap();
    expect(getBannerPathIfPresent()).resolves.toContain('/dev/dynamix/banner.png');
})

test('get banner path returns null when no banner (state loaded)', async () => {
    await store.dispatch(loadDynamixConfigFile()).unwrap();
    expect(getBannerPathIfPresent('notabanner.png')).resolves.toMatchInlineSnapshot('null');
});
@@ -24,6 +24,7 @@ test('Returns paths', async () => {
      "keyfile-base",
      "machine-id",
      "log-base",
      "var-run",
    ]
  `);
});
@@ -23,6 +23,7 @@ import {
} from '@apollo/client/core/core.cjs';
import { MinigraphStatus } from '@app/graphql/generated/api/types';
import { API_VERSION } from '@app/environment';
import { loadStateFiles } from '@app/store/modules/emhttp';

type CloudQueryResult = NonNullable<
    ApolloQueryResult<getCloudQuery>['data']['cloud']
@@ -343,6 +344,7 @@ export const report = async (...argv: string[]) => {

    // Load my servers config file into store
    await store.dispatch(loadConfigFile());
    await store.dispatch(loadStateFiles());

    const { config } = store.getState();
    if (!config.upc.apikey) throw new Error('Missing UPC API key');
141 api/src/common/dashboard/generate-data.ts Normal file

@@ -0,0 +1,141 @@
import { ConnectListAllDomainsFlags } from '@vmngr/libvirt';
import { getHypervisor } from '@app/core/utils/vms/get-hypervisor';
import display from '@app/graphql/resolvers/query/display';
import { docker } from '@app/core/utils/clients/docker';
import { getUnraidVersion } from '@app/common/dashboard/get-unraid-version';
import { getArray } from '@app/common/dashboard/get-array';
import { bootTimestamp } from '@app/common/dashboard/boot-timestamp';
import { dashboardLogger } from '@app/core/log';
import { getters, store } from '@app/store';
import { type DashboardServiceInput, type DashboardInput } from '@app/graphql/generated/client/graphql';
import { API_VERSION } from '@app/environment';
import { DynamicRemoteAccessType } from '@app/remoteAccess/types';
import { DashboardInputSchema } from '@app/graphql/generate/validators';
import { ZodError } from 'zod';

const getVmSummary = async (): Promise<DashboardInput['vms']> => {
    try {
        const hypervisor = await getHypervisor();
        if (!hypervisor) {
            return {
                installed: 0,
                started: 0,
            };
        }

        const activeDomains = await hypervisor.connectListAllDomains(ConnectListAllDomainsFlags.ACTIVE) as unknown[];
        const inactiveDomains = await hypervisor.connectListAllDomains(ConnectListAllDomainsFlags.INACTIVE) as unknown[];
        return {
            installed: activeDomains.length + inactiveDomains.length,
            started: activeDomains.length,
        };
    } catch {
        return {
            installed: 0,
            started: 0,
        };
    }
};

/*
const twoFactor = (): Dashboard['twoFactor'] => {
    const { isRemoteEnabled, isLocalEnabled } = checkTwoFactorEnabled();
    return {
        remote: {
            enabled: isRemoteEnabled,
        },
        local: {
            enabled: isLocalEnabled,
        },
    };
}; */

const getDynamicRemoteAccessService = (): DashboardServiceInput | null => {
    const uptimeTimestamp = bootTimestamp.toISOString();

    const { config, dynamicRemoteAccess } = store.getState();
    const enabledStatus = config.remote.dynamicRemoteAccessType;

    return {
        name: 'dynamic-remote-access',
        online: enabledStatus !== DynamicRemoteAccessType.DISABLED,
        version: dynamicRemoteAccess.runningType,
        uptime: {
            timestamp: new Date(uptimeTimestamp),
        },
    };
};

const services = (): DashboardInput['services'] => {
    const uptimeTimestamp = bootTimestamp.toISOString();
    const dynamicRemoteAccess = getDynamicRemoteAccessService();
    return [{
        name: 'unraid-api',
        online: true,
        uptime: {
            timestamp: new Date(uptimeTimestamp),
        },
        version: API_VERSION,
    },
    ...(dynamicRemoteAccess ? [dynamicRemoteAccess] : [])];
};

const getData = async (): Promise<DashboardInput> => {
    const emhttp = getters.emhttp();
    const docker = getters.docker();

    return {
        vars: {
            regState: emhttp.var.regState,
            regTy: emhttp.var.regTy,
            flashGuid: emhttp.var.flashGuid,
        },
        apps: {
            installed: docker.installed ?? 0,
            started: docker.running ?? 0
        },
        versions: {
            unraid: await getUnraidVersion(),
        },
        os: {
            hostname: emhttp.var.name,
            uptime: new Date(bootTimestamp.toISOString()),
        },
        vms: await getVmSummary(),
        array: getArray(),
        services: services(),
        display: await display(),
        config: emhttp.var.configValid ? { valid: true } : {
            valid: false,
            error: {
                error: 'UNKNOWN_ERROR',
                invalid: 'INVALID',
                nokeyserver: 'NO_KEY_SERVER',
                withdrawn: 'WITHDRAWN',
            }[emhttp.var.configState] ?? 'UNKNOWN_ERROR',
        },
    };
};

export const generateData = async (): Promise<DashboardInput | null> => {
    const data = await getData();

    try {
        // Validate generated data
        // @TODO: Fix this runtype to use generated types from the Zod validators (as seen in mothership Codegen)
        const result = DashboardInputSchema().parse(data)

        return result

    } catch (error: unknown) {
        // Log error for user
        if (error instanceof ZodError) {
            dashboardLogger.error('Failed validation with issues: ', error.issues.map(issue => ({ message: issue.message, path: issue.path.join(',') })))
        } else {
            dashboardLogger.error('Failed validating dashboard object: ', error, data);
        }
    }

    return null;
};
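For orientation, here is a minimal sketch of how the exported `generateData` helper could be consumed. Only `generateData`, its import path, and its null-on-validation-failure contract come from generate-data.ts; the caller name and the "publish" step are hypothetical.

```ts
// Illustrative only: a hypothetical consumer of generateData.
import { generateData } from '@app/common/dashboard/generate-data';

export const publishDashboardUpdate = async (): Promise<void> => {
    const dashboard = await generateData();
    if (!dashboard) {
        // Zod validation failed; generateData already logged the issues via dashboardLogger.
        return;
    }

    // Hypothetical: forward the validated DashboardInput (e.g. to mothership).
    console.log('dashboard payload ready for', dashboard.os.hostname);
};
```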
92 api/src/core/modules/docker/get-docker-containers.ts Normal file

@@ -0,0 +1,92 @@
/*!
 * Copyright 2019-2022 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */

import fs from 'fs';
import camelCaseKeys from 'camelcase-keys';
import { catchHandlers } from '@app/core/utils/misc/catch-handlers';
import { getters, store } from '@app/store';
import { updateDockerState } from '@app/store/modules/docker'

import {
    type ContainerPort,
    ContainerPortType,
    type DockerContainer,
    ContainerState,
} from '@app/graphql/generated/api/types';
import { dockerLogger } from '@app/core/log';
import { docker } from '@app/core/utils/clients/docker';

/**
 * Get all Docker containers.
 * @returns All the in/active Docker containers on the system.
 */

export const getDockerContainers = async (
    { useCache } = { useCache: true }
): Promise<Array<DockerContainer>> => {
    const dockerState = getters.docker()
    if (useCache && dockerState.containers) {
        dockerLogger.trace('Using docker container cache');
        return dockerState.containers;
    }

    dockerLogger.trace('Skipping docker container cache');

    /**
     * Docker auto start file
     *
     * @note Doesn't exist if array is offline.
     * @see https://github.com/limetech/webgui/issues/502#issue-480992547
     */
    const autoStartFile = await fs.promises
        .readFile(getters.paths()['docker-autostart'], 'utf8')
        .then((file) => file.toString())
        .catch(() => '');
    const autoStarts = autoStartFile.split('\n');
    const rawContainers = await docker
        .listContainers({
            all: true,
            size: true,
        })
        .then((containers) =>
            containers.map((object) => camelCaseKeys(object, { deep: true }))
        )
        // If docker throws an error return no containers
        .catch(catchHandlers.docker);

    // Cleanup container object
    const containers: Array<DockerContainer> = rawContainers.map<DockerContainer>(
        (container) => {
            const names = container.names[0];
            const containerData: DockerContainer = {
                ...container,
                labels: container.labels,
                // @ts-expect-error sizeRootFs is not on the dockerode type, but is fetched when size: true is set
                sizeRootFs: container.sizeRootFs ?? undefined,
                imageId: container.imageID,
                state:
                    typeof container?.state === 'string'
                        ? ContainerState[container.state.toUpperCase()] ??
                          ContainerState.EXITED
                        : ContainerState.EXITED,
                autoStart: autoStarts.includes(names.split('/')[1]),
                ports: container.ports.map<ContainerPort>((port) => ({
                    ...port,
                    type: ContainerPortType[port.type.toUpperCase()],
                })),
            };
            return containerData;
        }
    );

    // Get all of the current containers
    const installed = containers.length;
    const running = containers.filter(
        (container) => container.state === ContainerState.RUNNING
    ).length;

    store.dispatch(updateDockerState({ containers, installed, running }))
    return containers;
};
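A small sketch of the cache behaviour of the new helper, assuming the `@app/core/modules/index` re-export used elsewhere in this diff. The first call bypasses the cache, hits the Docker daemon, and repopulates the redux docker slice; the second call is answered from `getters.docker().containers` without another daemon round trip. The wrapper function itself is illustrative.

```ts
import { getDockerContainers } from '@app/core/modules/index';

const refreshThenRead = async () => {
    // Force a fresh listContainers() call and update the store counts.
    const fresh = await getDockerContainers({ useCache: false });

    // Default is { useCache: true }: served from the cached containers in the store.
    const cached = await getDockerContainers();

    return { fresh: fresh.length, cached: cached.length };
};
```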
53 api/src/core/utils/images/image-file-helpers.ts Normal file

@@ -0,0 +1,53 @@
import { getters } from '@app/store/index';
import { FileLoadStatus } from '@app/store/types';
import { readFile, stat } from 'node:fs/promises';
import { join } from 'node:path';

const isImageFile = async (path: string): Promise<boolean> => {
    try {
        const stats = await stat(path);
        if (stats.size < 25) {
            return false;
        }

        return true;
    } catch (error: unknown) {
        return false;
    }
};

export const getCasePathIfPresent = async (): Promise<string | null> => {

    const dynamixBasePath = getters.paths()['dynamix-base'];

    const configFilePath = join(dynamixBasePath, 'case-model.cfg');
    const caseImagePath = join(dynamixBasePath, 'case-model.png');
    try {
        const caseConfig = await readFile(configFilePath, 'utf-8');
        if (caseConfig.includes('.') && (await isImageFile(caseImagePath))) {
            return caseImagePath;
        }

        return null;
    } catch (error: unknown) {
        return null;
    }
};

export const getBannerPathIfPresent = async (
    filename = 'banner.png'
): Promise<string | null> => {
    if (getters.dynamix().status === FileLoadStatus.LOADED && getters.dynamix().display?.banner) {
        const dynamixBasePath = getters.paths()['dynamix-base'];
        const customBannerPath = join(dynamixBasePath, filename);
        const defaultBannerPath = '/usr/local/emhttp/plugins/dynamix/images/banner.png';
        if (await isImageFile(customBannerPath)) {
            return customBannerPath;
        }
        if (await isImageFile(defaultBannerPath)) {
            return defaultBannerPath;
        }
    }

    return null;
};
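A minimal sketch of calling the new image helpers: each resolves to an absolute path when a usable image (larger than 25 bytes) exists, and to null otherwise; the banner helper additionally requires the dynamix config to be loaded with a banner enabled, as the tests earlier in this diff exercise. The surrounding function is illustrative only.

```ts
import { getBannerPathIfPresent, getCasePathIfPresent } from '@app/core/utils/images/image-file-helpers';

const logCustomisations = async () => {
    const casePath = await getCasePathIfPresent();     // <dynamix-base>/case-model.png or null
    const bannerPath = await getBannerPathIfPresent(); // custom banner, bundled default banner, or null

    console.log({ casePath, bannerPath });
};
```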
@@ -5,9 +5,7 @@ import {
    split,
} from '@apollo/client/core/core.cjs';
import { onError } from '@apollo/client/link/error';
import {
    getInternalApiAddress,
} from '@app/consts';
import { getInternalApiAddress } from '@app/consts';
import WebSocket from 'ws';
import { fetch } from 'cross-fetch';
import { getMainDefinition } from '@apollo/client/utilities';
@@ -27,11 +25,14 @@ const getWebsocketWithHeaders = () => {
        });
    }
};
    }
};

export const getApiApolloClient = ({ upcApiKey }: { upcApiKey: string }) => {
    const nginxPort = getters?.emhttp()?.nginx?.httpPort ?? 80;
    graphqlLogger.debug('Internal GraphQL URL: %s', getInternalApiAddress(true, nginxPort));
    graphqlLogger.debug(
        'Internal GraphQL URL: %s',
        getInternalApiAddress(true, nginxPort)
    );
    const httpLink = new HttpLink({
        uri: getInternalApiAddress(true, nginxPort),
        fetch,
30 api/src/graphql/express/get-images.ts Normal file

@@ -0,0 +1,30 @@
import { getBannerPathIfPresent, getCasePathIfPresent } from "@app/core/utils/images/image-file-helpers";
import { apiKeyToUser } from "@app/graphql/index";
import { type Request, type Response } from "express";
export const getImages = async (req: Request, res: Response) => {
    // @TODO - Clean up this function
    const apiKey = req.headers['x-api-key'];
    if (
        apiKey &&
        typeof apiKey === 'string' &&
        (await apiKeyToUser(apiKey)).role !== 'guest'
    ) {
        if (req.params.type === 'banner') {
            const path = await getBannerPathIfPresent();
            if (path) {
                res.sendFile(path);
                return;
            }
        } else if (req.params.type === 'case') {
            const path = await getCasePathIfPresent();
            if (path) {
                res.sendFile(path);
                return;
            }
        }

        return res.status(404).send('no customization of this type found');
    }

    return res.status(403).send('unauthorized');
};
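A hypothetical sketch of wiring the new Express handler. Only the handler itself, its `x-api-key` header check, and its `req.params.type` values ('banner' or 'case') come from get-images.ts; the route path and standalone app here are assumptions for illustration, not the route the API actually registers.

```ts
import express from 'express';
import { getImages } from '@app/graphql/express/get-images';

const app = express();

// ':type' is read by the handler and must be 'banner' or 'case';
// requests also need a non-guest x-api-key header.
app.get('/images/:type', getImages);
```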
16 api/src/graphql/resolvers/query/docker.ts Normal file

@@ -0,0 +1,16 @@
import { getDockerContainers } from "@app/core/modules/index";
import { ensurePermission } from "@app/core/utils/permissions/ensure-permission";
import { type QueryResolvers } from "@app/graphql/generated/api/types";

export const dockerContainersResolver: QueryResolvers['dockerContainers'] = async (_, __, context) => {
    const { user } = context;

    // Check permissions
    ensurePermission(user, {
        resource: 'docker/container',
        action: 'read',
        possession: 'any',
    });

    return getDockerContainers();
}
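For illustration, a query this resolver could serve once it is registered under `Query.dockerContainers`. The caller needs read permission on the `docker/container` resource per the resolver, and the selection set below is an assumption based on the fields populated in get-docker-containers.ts (imageId, state, autoStart, ports); the real fields depend on the generated schema.

```ts
// Sketch only: field names are assumed from the container mapping code.
const DOCKER_CONTAINERS_QUERY = /* GraphQL */ `
    query DockerContainers {
        dockerContainers {
            imageId
            state
            autoStart
            ports {
                type
            }
        }
    }
`;
```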
@@ -1,11 +1,11 @@
import { getArray } from '@app/core/modules/get-array';
import { getDockerContainers } from '@app/core/modules/index';
import { type QueryResolvers } from '@app/graphql/generated/api/types';
import cloud from '@app/graphql/resolvers/query/cloud';
import { config } from '@app/graphql/resolvers/query/config';
import crashReportingEnabled from '@app/graphql/resolvers/query/crash-reporting-enabled';
import { disksResolver } from '@app/graphql/resolvers/query/disks';
import display from '@app/graphql/resolvers/query/display';
import { dockerContainersResolver } from '@app/graphql/resolvers/query/docker';
import flash from '@app/graphql/resolvers/query/flash';
import online from '@app/graphql/resolvers/query/online';
import owner from '@app/graphql/resolvers/query/owner';
@@ -21,7 +21,7 @@ export const Query: QueryResolvers = {
    config,
    crashReportingEnabled,
    disks: disksResolver,
    dockerContainers: getDockerContainers,
    dockerContainers: dockerContainersResolver,
    display,
    flash,
    online,
@@ -25,7 +25,9 @@ export const executeRemoteGraphQLQuery = async (
        upcApiKey: apiKey
    });
    if (ENVIRONMENT === 'development') {
        remoteQueryLogger.debug('Running query', parsedQuery.query);
        remoteQueryLogger.addContext('query', parsedQuery.query);
        remoteQueryLogger.debug('[DEVONLY] Running query');
        remoteQueryLogger.removeContext('query');
    }
    const localResult = await localClient.query({
        query: parsedQuery.query,
@@ -19,13 +19,13 @@ import { loadRegistrationKey } from '@app/store/modules/registration';
import { createApolloExpressServer } from '@app/server';
import { unlinkSync } from 'fs';
import { fileExistsSync } from '@app/core/utils/files/file-exists';
import { setupDockerWatch } from '@app/store/watch/docker-watch';
import { PORT, environment } from '@app/environment';
import { shutdownApiEvent } from '@app/store/actions/shutdown-api-event';
import { PingTimeoutJobs } from '@app/mothership/jobs/ping-timeout-jobs';
import { type BaseContext, type ApolloServer } from '@apollo/server';
import { loadDynamixConfigFile } from '@app/store/modules/dynamix';
import { setupDynamixConfigWatch } from '@app/store/watch/dynamix-config-watch';
import { setupVarRunWatch } from '@app/store/watch/var-run-watch';

let server: ApolloServer<BaseContext>;

@@ -68,7 +68,7 @@ void am(
    setupRegistrationKeyWatch();

    // Start listening to docker events
    setupDockerWatch();
    await setupVarRunWatch();

    // Start listening to dynamix config file changes
    setupDynamixConfigWatch();
33 api/src/store/modules/docker.ts Normal file

@@ -0,0 +1,33 @@
import { createSlice, type PayloadAction } from '@reduxjs/toolkit';
import merge from 'lodash/merge';
import { DaemonConnectionStatus } from '@app/store/types';
import { type DockerContainer } from '@app/graphql/generated/api/types';

type DockerState = {
    status: DaemonConnectionStatus;
    installed: number | null;
    running: number | null;
    containers: DockerContainer[];
};

const initialState: DockerState = {
    status: DaemonConnectionStatus.DISCONNECTED,
    installed: null,
    running: null,
    containers: [],
};

export const docker = createSlice({
    name: 'docker',
    initialState,
    reducers: {
        updateDockerState(state, action: PayloadAction<Partial<typeof initialState>>) {
            state.status = action.payload.status ?? initialState.status;
            state.installed = action.payload.installed ?? initialState.installed;
            state.running = action.payload.running ?? initialState.running;
            state.containers = action.payload.containers ?? initialState.containers;
        },
    },
});

export const { updateDockerState } = docker.actions;
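An illustrative round trip through the new docker slice: dispatch `updateDockerState` with a partial payload and read the counts back via the store getters. Everything here appears in the diff except the sample values, which are made up.

```ts
import { getters, store } from '@app/store';
import { updateDockerState } from '@app/store/modules/docker';

// Partial payload: any field left out falls back to initialState in the reducer.
store.dispatch(updateDockerState({ installed: 12, running: 7, containers: [] }));

const { installed, running, status } = getters.docker();
// status is DaemonConnectionStatus.DISCONNECTED here because it was omitted from the payload.
console.log({ installed, running, status });
```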
@@ -1,19 +1,19 @@
import { parseConfig } from '@app/core/utils/misc/parse-config';
import { createAsyncThunk, createSlice, PayloadAction } from '@reduxjs/toolkit';
import { createAsyncThunk, createSlice, type PayloadAction } from '@reduxjs/toolkit';
import { access } from 'fs/promises';
import merge from 'lodash/merge';
import { FileLoadStatus } from '@app/store/types';
import { F_OK } from 'constants';
import { RecursivePartial, RecursiveNullable } from '@app/types';
import { type RecursivePartial, type RecursiveNullable } from '@app/types';
import { toBoolean } from '@app/core/utils/casting';
import { DynamixConfig } from '@app/core/types/ini';
import { type DynamixConfig } from '@app/core/types/ini';

export type SliceState = {
    status: FileLoadStatus;
    status: FileLoadStatus;
} & DynamixConfig;

export const initialState: Partial<SliceState> = {
    status: FileLoadStatus.UNLOADED,
    status: FileLoadStatus.UNLOADED,
};

/**
@@ -21,53 +21,69 @@ export const initialState: Partial<SliceState> = {
 *
 * Note: If the file doesn't exist this will fallback to default values.
 */
export const loadDynamixConfigFile = createAsyncThunk<RecursiveNullable<RecursivePartial<DynamixConfig>>, string | undefined>('config/load-dynamix-config-file', async filePath => {
    const store = await import('@app/store');
    const paths = store.getters.paths();
    const path = filePath ?? paths['dynamix-config'];
    const fileExists = await access(path, F_OK).then(() => true).catch(() => false);
    const file = fileExists ? parseConfig<RecursivePartial<DynamixConfig>>({
        filePath: path,
        type: 'ini',
    }) : {};
    const { display } = file;
    return merge(file, {
        ...(display?.scale ? { scale: toBoolean(display?.scale) } : {}),
        ...(display?.tabs ? { tabs: toBoolean(display?.tabs) } : {}),
        ...(display?.resize ? { resize: toBoolean(display?.resize) } : {}),
        ...(display?.wwn ? { wwn: toBoolean(display?.wwn) } : {}),
        ...(display?.total ? { total: toBoolean(display?.total) } : {}),
        ...(display?.usage ? { usage: toBoolean(display?.usage) } : {}),
        ...(display?.text ? { text: toBoolean(display?.text) } : {}),
        ...(display?.warning ? { warning: Number.parseInt(display?.warning, 10) } : {}),
        ...(display?.critical ? { critical: Number.parseInt(display?.critical, 10) } : {}),
        ...(display?.hot ? { hot: Number.parseInt(display?.hot, 10) } : {}),
        ...(display?.max ? { max: Number.parseInt(display?.max, 10) } : {}),
        locale: display?.locale ?? 'en_US',
    }) as RecursivePartial<DynamixConfig>;
export const loadDynamixConfigFile = createAsyncThunk<
    RecursiveNullable<RecursivePartial<DynamixConfig>>,
    string | undefined
>('config/load-dynamix-config-file', async (filePath) => {
    const store = await import('@app/store');
    const paths = store.getters.paths();
    const path = filePath ?? paths['dynamix-config'];
    const fileExists = await access(path, F_OK)
        .then(() => true)
        .catch(() => false);
    const file = fileExists
        ? parseConfig<RecursivePartial<DynamixConfig>>({
              filePath: path,
              type: 'ini',
          })
        : {};
    const { display } = file;
    return merge(file, {
        ...(display?.scale ? { scale: toBoolean(display?.scale) } : {}),
        ...(display?.tabs ? { tabs: toBoolean(display?.tabs) } : {}),
        ...(display?.resize ? { resize: toBoolean(display?.resize) } : {}),
        ...(display?.wwn ? { wwn: toBoolean(display?.wwn) } : {}),
        ...(display?.total ? { total: toBoolean(display?.total) } : {}),
        ...(display?.usage ? { usage: toBoolean(display?.usage) } : {}),
        ...(display?.text ? { text: toBoolean(display?.text) } : {}),
        ...(display?.warning
            ? { warning: Number.parseInt(display?.warning, 10) }
            : {}),
        ...(display?.critical
            ? { critical: Number.parseInt(display?.critical, 10) }
            : {}),
        ...(display?.hot ? { hot: Number.parseInt(display?.hot, 10) } : {}),
        ...(display?.max ? { max: Number.parseInt(display?.max, 10) } : {}),
        locale: display?.locale ?? 'en_US',
    }) as RecursivePartial<DynamixConfig>;
});

export const dynamix = createSlice({
    name: 'dynamix',
    initialState,
    reducers: {
        updateDynamixConfig(state, action: PayloadAction<RecursivePartial<SliceState>>) {
            return merge(state, action.payload);
        },
    },
    extraReducers(builder) {
        builder.addCase(loadDynamixConfigFile.pending, (state, _action) => {
            state.status = FileLoadStatus.LOADING;
        });
    name: 'dynamix',
    initialState,
    reducers: {
        updateDynamixConfig(
            state,
            action: PayloadAction<RecursivePartial<SliceState>>
        ) {
            return merge(state, action.payload);
        },
    },
    extraReducers(builder) {
        builder.addCase(loadDynamixConfigFile.pending, (state) => {
            state.status = FileLoadStatus.LOADING;
        });

        builder.addCase(loadDynamixConfigFile.fulfilled, (state, action) => {
            merge(state, action.payload, { status: FileLoadStatus.LOADED });
        });
        builder.addCase(loadDynamixConfigFile.fulfilled, (state, action) => {
            merge(state, action.payload, { status: FileLoadStatus.LOADED });
        });

        builder.addCase(loadDynamixConfigFile.rejected, (state, action) => {
            merge(state, action.payload, { status: FileLoadStatus.FAILED_LOADING });
        });
    },
        builder.addCase(loadDynamixConfigFile.rejected, (state, action) => {
            merge(state, action.payload, {
                status: FileLoadStatus.FAILED_LOADING,
            });
        });
    },
});

export const { updateDynamixConfig } = dynamix.actions;
@@ -20,7 +20,8 @@ const initialState = {
    'myservers-env': '/boot/config/plugins/dynamix.my.servers/env' as const,
    'keyfile-base': resolvePath(process.env.PATHS_KEYFILE_BASE ?? '/boot/config' as const),
    'machine-id': resolvePath(process.env.PATHS_MACHINE_ID ?? '/var/lib/dbus/machine-id' as const),
    'log-base': resolvePath('/var/log/unraid-api/' as const)
    'log-base': resolvePath('/var/log/unraid-api/' as const),
    'var-run': '/var/run' as const,
};

export const paths = createSlice({
61 api/src/store/watch/docker-watch.ts Normal file

@@ -0,0 +1,61 @@
import { store } from '@app/store';
import { dockerLogger } from '@app/core/log';
import { updateDockerState } from '@app/store/modules/docker';
import { getDockerContainers } from '@app/core/modules/index';
import { ContainerState } from '@app/graphql/generated/api/types';
import { docker } from '@app/core/utils/index';
import DockerEE from 'docker-event-emitter';
import { debounce } from 'lodash';

const updateContainerCache = async () => {
    try {
        await getDockerContainers({ useCache: false });
    } catch (err) {
        dockerLogger.warn('Caught error getting containers %o', err)
        store.dispatch(updateDockerState({ installed: null, running: null, containers: [] }))
    }
};

const debouncedContainerCacheUpdate = debounce(updateContainerCache, 500);

export const setupDockerWatch = async (): Promise<DockerEE> => {
    // Only watch container events equal to start/stop
    const watchedActions = [
        'die',
        'kill',
        'oom',
        'pause',
        'restart',
        'start',
        'stop',
        'unpause',
    ];

    // Create docker event emitter instance
    dockerLogger.debug('Creating docker event emitter instance');

    const dee = new DockerEE(docker);
    // On Docker event update info with { apps: { installed, started } }
    dee.on(
        'container',
        async (data: {
            Type: 'container';
            Action: 'start' | 'stop';
            from: string;
        }) => {
            // Only listen to container events
            if (!watchedActions.includes(data.Action)) {
                return;
            }
            dockerLogger.addContext('data', data);
            dockerLogger.debug(`[${data.from}] ${data.Type}->${data.Action}`);
            dockerLogger.removeContext('data');
            await debouncedContainerCacheUpdate()
        }
    );
    // Get docker container count on first start
    await debouncedContainerCacheUpdate();
    await dee.start();
    dockerLogger.debug('Binding to docker events');
    return dee;
};
25 api/src/store/watch/var-run-watch.ts Normal file

@@ -0,0 +1,25 @@
import { dockerLogger } from '@app/core/log';
import { getters, store } from '@app/store/index';
import { setupDockerWatch } from '@app/store/watch/docker-watch';
import { watch } from 'chokidar';
import type DockerEE from 'docker-event-emitter';
import { updateDockerState } from '@app/store/modules/docker'

export const setupVarRunWatch = () => {
    const paths = getters.paths()
    let dockerWatcher: null | typeof DockerEE = null;
    watch(paths['var-run'], { ignoreInitial: false }).on('add', async (path) => {
        if (path === paths['docker-socket']) {
            dockerLogger.debug('Starting docker watch');
            dockerWatcher = await setupDockerWatch()
        }
    }).on('unlink', (path) => {
        if (path === paths['docker-socket'] && dockerWatcher) {
            dockerLogger.debug('Stopping docker watch')
            dockerWatcher?.stop?.()

            store.dispatch(updateDockerState({ installed: null, running: null, containers: [] }))
        }
    })

}
@@ -151,8 +151,8 @@ $upc_translations = [
    'loading' => _('Loading Connect data'),
    'displayingLastKnown' => _('Displaying last known server data'),
    'mothership' => [
        'connected' => _('Connected to Mothership'),
        'notConnected' => _('Not Connected to Mothership'),
        'connected' => _('Connected'),
        'notConnected' => _('Disconnected'),
    ],
    'accessLabels' => [
        'current' => _('Current server'),