Mirror of https://github.com/unraid/api.git (synced 2026-01-04 23:50:37 -06:00)

fix: tolowercase calls and upgrade deps (#622)
api/.eslintrc.cjs (Normal file, 47 lines added)
@@ -0,0 +1,47 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
    root: true,
    plugins: [
        '@typescript-eslint/eslint-plugin',
        'unused-imports',
        'eslint-plugin-unicorn',
    ],
    ignorePatterns: ['src/graphql/generated/**/*.ts', '*.test.ts'],
    parser: '@typescript-eslint/parser',
    rules: {
        '@typescript-eslint/no-redundant-type-constituents': 'off',
        '@typescript-eslint/no-unsafe-call': 'off',
        '@typescript-eslint/naming-convention': 'off',
        '@typescript-eslint/no-unsafe-assignment': 'off',
        '@typescript-eslint/no-unsafe-return': 'off',
        '@typescript-eslint/ban-types': 'off',
        '@typescript-eslint/no-explicit-any': 'off',
        '@typescript-eslint/consistent-type-imports': [
            'warn',
            { fixStyle: 'inline-type-imports' },
        ],
        'unicorn/numeric-separators-style': [
            'error',
            { number: { minimumDigits: 0, groupLength: 3 } },
        ],
        'import/no-cycle': 'off', // Change this to "error" to find circular imports
        '@typescript-eslint/no-use-before-define': ['error'],
    },
    overrides: [
        {
            files: ['*.ts'],
            extends: [
                'eslint:recommended',
                'plugin:@typescript-eslint/recommended',
            ],
            parserOptions: {
                project: true,
                tsconfigRootDir: __dirname,
            },
            rules: {
                '@typescript-eslint/no-explicit-any': 'off',
            },
            ignorePatterns: ['tsup.config.ts', 'vite.config.ts']
        },
    ],
};
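For context, the `consistent-type-imports` rule configured above with `fixStyle: 'inline-type-imports'` auto-fixes type-only named imports into inline `type` specifiers, which matches the style seen elsewhere in this diff. A minimal illustration (not part of the commit):

    // Before `eslint --fix` (Request is only used in type positions):
    import { Request } from 'express';
    // After the fix with fixStyle: 'inline-type-imports':
    import { type Request } from 'express';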
api/package-lock.json (generated, 28 lines changed)
@@ -48,9 +48,9 @@
        "graphql-type-uuid": "^0.2.0",
        "graphql-ws": "^5.12.1",
        "htpasswd-js": "^1.0.2",
-       "ini": "^4.0.0",
+       "ini": "^4.1.0",
        "ip": "^1.1.8",
-       "jose": "^4.13.2",
+       "jose": "^4.14.2",
        "launchdarkly-eventsource": "^1.4.4",
        "lodash": "^4.17.21",
        "multi-ini": "^2.2.0",
@@ -9744,9 +9744,9 @@
      "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
    },
    "node_modules/ini": {
-     "version": "4.0.0",
-     "resolved": "https://registry.npmjs.org/ini/-/ini-4.0.0.tgz",
-     "integrity": "sha512-t0ikzf5qkSFqRl1e6ejKBe+Tk2bsQd8ivEkcisyGXsku2t8NvXZ1Y3RRz5vxrDgOrTBOi13CvGsVoI5wVpd7xg==",
+     "version": "4.1.0",
+     "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.0.tgz",
+     "integrity": "sha512-HLR38RSF2iulAzc3I/sma4CoYxQP844rPYCNfzGDOHqa/YqVlwuuZgBx6M50/X8dKgzk0cm1qRg3+47mK2N+cQ==",
      "engines": {
        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
      }
@@ -10349,9 +10349,9 @@
      }
    },
    "node_modules/jose": {
-     "version": "4.13.2",
-     "resolved": "https://registry.npmjs.org/jose/-/jose-4.13.2.tgz",
-     "integrity": "sha512-GMUKtV+l05F6NY/06nM7rucHM6Ktvw6sxnyRqINBNWS/hCM/bBk7kanOEckRP8xtC/jzuGfTRVZvkjjuy+g4dA==",
+     "version": "4.14.2",
+     "resolved": "https://registry.npmjs.org/jose/-/jose-4.14.2.tgz",
+     "integrity": "sha512-Fcbi5lskAiSvs8qhdQBusANZWwyATdp7IxgHJTXiaU74sbVjX9uAw+myDPvI8pNo2wXKHECXCR63hqhRkN/SSQ==",
      "funding": {
        "url": "https://github.com/sponsors/panva"
      }
@@ -23507,9 +23507,9 @@
      "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
    },
    "ini": {
-     "version": "4.0.0",
-     "resolved": "https://registry.npmjs.org/ini/-/ini-4.0.0.tgz",
-     "integrity": "sha512-t0ikzf5qkSFqRl1e6ejKBe+Tk2bsQd8ivEkcisyGXsku2t8NvXZ1Y3RRz5vxrDgOrTBOi13CvGsVoI5wVpd7xg=="
+     "version": "4.1.0",
+     "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.0.tgz",
+     "integrity": "sha512-HLR38RSF2iulAzc3I/sma4CoYxQP844rPYCNfzGDOHqa/YqVlwuuZgBx6M50/X8dKgzk0cm1qRg3+47mK2N+cQ=="
    },
    "inquirer": {
      "version": "8.2.5",
@@ -23933,9 +23933,9 @@
      "dev": true
    },
    "jose": {
-     "version": "4.13.2",
-     "resolved": "https://registry.npmjs.org/jose/-/jose-4.13.2.tgz",
-     "integrity": "sha512-GMUKtV+l05F6NY/06nM7rucHM6Ktvw6sxnyRqINBNWS/hCM/bBk7kanOEckRP8xtC/jzuGfTRVZvkjjuy+g4dA=="
+     "version": "4.14.2",
+     "resolved": "https://registry.npmjs.org/jose/-/jose-4.14.2.tgz",
+     "integrity": "sha512-Fcbi5lskAiSvs8qhdQBusANZWwyATdp7IxgHJTXiaU74sbVjX9uAw+myDPvI8pNo2wXKHECXCR63hqhRkN/SSQ=="
    },
    "joycon": {
      "version": "3.1.1",
@@ -96,9 +96,9 @@
    "graphql-type-uuid": "^0.2.0",
    "graphql-ws": "^5.12.1",
    "htpasswd-js": "^1.0.2",
-   "ini": "^4.0.0",
+   "ini": "^4.1.0",
    "ip": "^1.1.8",
-   "jose": "^4.13.2",
+   "jose": "^4.14.2",
    "launchdarkly-eventsource": "^1.4.4",
    "lodash": "^4.17.21",
    "multi-ini": "^2.2.0",
api/src/cli/commands/status.ts (Normal file, 19 lines added)
@@ -0,0 +1,19 @@
import prettyMs from 'pretty-ms';
import pidUsage from 'pidusage';
import { cliLogger } from '@app/core/log';
import { getUnraidApiPid } from '@app/cli/get-unraid-api-pid';
import { setEnv } from '@app/cli/set-env';

export const status = async () => {
    setEnv('LOG_TYPE', 'raw');

    // Find all processes called "unraid-api" which aren't this process
    const unraidApiPid = await getUnraidApiPid();
    if (!unraidApiPid) {
        cliLogger.info('Found no running processes.');
        return;
    }

    const stats = await pidUsage(unraidApiPid);
    cliLogger.info(`API has been running for ${prettyMs(stats.elapsed)} and is in "${process.env.ENVIRONMENT ?? 'ERR: Unknown Environment'}" mode!`);
};
@@ -1,8 +1,8 @@
-import { config } from '@app/core/config';
+import { PORT } from '@app/environment';
 import { type JSONWebKeySet } from 'jose';

 export const getInternalApiAddress = (isHttp = true, nginxPort = 80) => {
-    const envPort = config.port as number | string;
+    const envPort = PORT;
     const protocol = isHttp ? 'http' : 'ws';

     if (!envPort.toString().includes('.sock')) {
api/src/core/index.ts (Normal file, 8 lines added)
@@ -0,0 +1,8 @@
export * as modules from '@app/core/modules';
export * as notifiers from '@app/core/notifiers';
export * as utils from '@app/core/utils';
export * from '@app/core/bus';
export * from '@app/core/log';
export * from '@app/core/permission-manager';
export * from '@app/core/permissions';
export * from '@app/core/pubsub';
api/src/core/modules/get-services.ts (Normal file, 76 lines added)
@@ -0,0 +1,76 @@
/*!
 * Copyright 2019-2022 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */

import { getEmhttpdService } from '@app/core/modules/services/get-emhttpd';
import { logger } from '@app/core/log';
import type { CoreResult, CoreContext } from '@app/core/types';
import { getUnraidApiService } from '@app/core/modules/services/get-unraid-api';
import { NODE_ENV } from '@app/environment';

const devNames = [
    'emhttpd',
    'rest-api',
];

const coreNames = [
    'unraid-api',
];

interface Service {
    online: boolean;
    uptime: string;
    version: string;
}

interface ServiceResult extends CoreResult {
    json: Service;
}

interface ServiceWithName extends Service {
    name: string;
}

/**
 * Add name to services.
 *
 * @param services
 * @param names
 */
const addNameToService = (services: ServiceResult[], names: string[]): ServiceWithName[] => services.map((service, index) => ({
    name: names[index],
    ...service.json,
}));

interface Result extends CoreResult {
    json: ServiceWithName[];
}

/**
 * Get all services.
 */
export const getServices = async (context: CoreContext): Promise<Result> => {
    const logErrorAndReturnEmptyArray = (error: Error) => {
        logger.error(error);
        return [];
    };

    const devServices: ServiceResult[] = NODE_ENV === 'development' ? await Promise.all([
        getEmhttpdService(context),
    ]).catch(logErrorAndReturnEmptyArray) as ServiceResult[] : [];

    const coreServices: ServiceResult[] = await Promise.all([
        getUnraidApiService(context),
    ]).catch(logErrorAndReturnEmptyArray) as ServiceResult[];

    const result = [
        ...addNameToService(devServices, devNames),
        ...addNameToService(coreServices, coreNames),
    ];

    return {
        text: `Services: ${JSON.stringify(result, null, 2)}`,
        json: result,
    };
};
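Illustrative sketch of how `addNameToService` pairs results with the name lists above by index (the data is invented, not part of the commit):

    // Hypothetical ServiceResult values, shown only to illustrate the zip-by-index behaviour.
    const coreResults = [
        { text: '', json: { online: true, uptime: '2 hours', version: '3.0.0' } },
    ];
    // addNameToService(coreResults, coreNames) yields:
    // [{ name: 'unraid-api', online: true, uptime: '2 hours', version: '3.0.0' }]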
api/src/core/types/states/var.ts (Normal file, 197 lines added)
@@ -0,0 +1,197 @@
/*!
 * Copyright 2019-2022 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */

import { type registrationType, type ArrayState, type RegistrationState } from "@app/graphql/generated/api/types";

/**
 * Global vars
 */
export type Var = {
    bindMgt: boolean | null;
    cacheNumDevices: number;
    cacheSbNumDisks: number;
    /** Description of your server (displayed in the "webGui"). */
    comment: string;
    /** Is the array's config valid. */
    configValid: boolean;
    /** @internal used to hold the value for config.error */
    configState: string;
    /** Current CSRF token for HTTP requests with emhttpd. */
    csrfToken: string;
    defaultFormat: string;
    /** Default file system for data disks. */
    defaultFsType: FsType;
    /** Amount of connected drives (license device count). */
    deviceCount: number;
    domain: string;
    domainLogin: string;
    domainShort: string;
    flashGuid: string;
    flashProduct: string;
    flashVendor: string;
    /** Current progress of the {@link ?content=mover | mover}. */
    fsCopyPrcnt: number;
    fsNumMounted: number;
    fsNumUnmountable: number;
    fsProgress: string;
    /** Current state of the array. */
    fsState: string;
    fsUnmountableMask: string;
    fuseDirectio: string;
    fuseDirectioDefault: string;
    fuseDirectioStatus: string;
    fuseRemember: string;
    fuseRememberDefault: string;
    fuseRememberStatus: string;
    hideDotFiles: boolean;
    // JoinStatus
    localMaster: boolean;
    /** The local tld to use e.g. `.local`. */
    localTld: string;
    /** Absolute file path to the data disks' luks key. */
    luksKeyfile: string;
    /** Max amount of data drives allowed in the array. */
    maxArraysz: number;
    /** Max amount of cache drives allowed in the array. */
    maxCachesz: number;
    mdColor: string;
    /** The amount of {@link ?content=array#disks-disabled | disabled disks} from the current array. */
    mdNumDisabled: number;
    mdNumDisks: number;
    mdNumErased: number;
    /** The amount of {@link ?content=array#disks-invalid | invalid disks} from the current array. */
    mdNumInvalid: number;
    /** The amount of {@link ?content=array#disks-missing | missing disks} from the current array. */
    mdNumMissing: number;
    mdNumNew: number;
    mdNumStripes: number;
    mdNumStripesDefault: number;
    mdNumStripesStatus: string;
    mdResync: number;
    mdResyncAction: string;
    mdResyncCorr: string;
    mdResyncDb: string;
    mdResyncDt: string;
    mdResyncPos: number;
    mdResyncSize: number;
    mdState: ArrayState;
    mdSyncThresh: number;
    mdSyncThreshDefault: number;
    mdSyncThreshStatus: string;
    mdSyncWindow: number;
    mdSyncWindowDefault: number;
    mdSyncWindowStatus: string;
    mdVersion: string;
    mdWriteMethod: number;
    mdWriteMethodDefault: string;
    mdWriteMethodStatus: string;
    /** Machine hostname. */
    name: string;
    // NrRequests
    nrRequests: number;
    // NrRequestsDefault
    nrRequestsDefault: number;
    // NrRequestsStatus
    /** NTP Server 1. */
    ntpServer1: string;
    /** NTP Server 2. */
    ntpServer2: string;
    /** NTP Server 3. */
    ntpServer3: string;
    /** NTP Server 4. */
    ntpServer4: string;
    pollAttributes: string;
    pollAttributesDefault: string;
    pollAttributesStatus: string;
    /** Port for the webui via HTTP. */
    port: number;
    /** Port for SSH daemon. */
    portssh: number;
    /** Port for the webui via HTTPS. */
    portssl: number;
    /** Port for telnet daemon. */
    porttelnet: number;
    queueDepth: string;
    regCheck: string;
    regState: RegistrationState;
    /** Where the registration key is stored. (e.g. "/boot/config/Pro.key") */
    regFile: string;
    regGen: string;
    regGuid: string;
    regTm: string;
    regTm2: string;
    /** Who the current Unraid key is registered to. */
    regTo: string;
    /** Which type of key this is. */
    regTy: registrationType;
    /** Is the server currently in safe mode. */
    safeMode: boolean;
    sbClean: boolean;
    sbEvents: number;
    sbName: string;
    sbNumDisks: number;
    sbState: string;
    sbSynced: number;
    sbSynced2: number;
    sbSyncErrs: number;
    sbSyncExit: string;
    sbUpdated: string;
    sbVersion: string;
    security: string;
    shareAvahiEnabled: boolean;
    shareAvahiSmbModel: string;
    shareAvahiSmbName: string;
    shareCacheEnabled: boolean;
    shareCacheFloor: string;
    /** Total number of disk/user shares. */
    shareCount: number;
    shareDisk: string;
    shareInitialGroup: string;
    shareInitialOwner: string;
    /** If the {@link ?content=mover | mover} is currently active. */
    shareMoverActive: boolean;
    shareMoverLogging: boolean;
    /** When the share mover script should run. Takes cron format time. */
    shareMoverSchedule: string;
    /** Total number of NFS shares. */
    shareNfsCount: number;
    shareNfsEnabled: boolean;
    /** Total number of SMB shares. */
    shareSmbCount: number;
    /** Is SMB enabled. */
    shareSmbEnabled: boolean;
    /** Which mode is SMB running in? active-directory | workgroup */
    shareSmbMode: string;
    shareUser: string;
    // ShareUserExclude
    shutdownTimeout: number;
    /** How long until emhttpd should spin down the data drives in your array. */
    spindownDelay: number;
    spinupGroups: boolean;
    /** Should the array be started by default on boot. */
    startArray: boolean;
    /** The default start mode for the server. */
    startMode: string;
    /** Which page to start the webGui on. */
    startPage: string;
    sysArraySlots: number;
    sysCacheSlots: number;
    sysFlashSlots: number;
    sysModel: string;
    /** Current timezone. {@link https://en.wikipedia.org/wiki/List_of_tz_database_time_zones | Timezone list}. */
    timeZone: string;
    /** Should a NTP server be used for time sync. */
    useNtp: boolean;
    /** Should SSH be enabled. */
    useSsh: boolean;
    /** If HTTPS should be enabled in the webui. */
    useSsl: boolean | null;
    /** Should telnet be enabled. */
    useTelnet: boolean;
    /** The current Unraid version. */
    version: string;
    /** The SMB workgroup. */
    workgroup: string;
    /** UPNP Setting */
    useUpnp: boolean;
};
api/src/core/utils/clients/emcmd.ts (Normal file, 37 lines added)
@@ -0,0 +1,37 @@
/*!
 * Copyright 2019-2022 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */
import { got } from 'got';
import { logger } from '@app/core/log';
import { type LooseObject } from '@app/core/types';
import { catchHandlers } from '@app/core/utils/misc/catch-handlers';
import { getters } from '@app/store';
import { DRY_RUN } from '@app/environment';

/**
 * Run a command with emcmd.
 */
export const emcmd = async (commands: LooseObject) => {
    const socketPath = getters.paths()['emhttpd-socket'];
    const { csrfToken } = getters.emhttp().var;

    const url = `http://unix:${socketPath}:/update.htm`;
    const options = {
        qs: {
            ...commands,
            csrf_token: csrfToken,
        },
    };

    if (DRY_RUN) {
        logger.debug(url, options);

        // Ensure we only log on dry-run
        return;
    }

    // Untested: this code is unused right now, so assume it's probably not working well anyway; swapped
    // to got to remove the request-promise dependency
    return got.get(url, { searchParams: { ...commands, csrf_token: csrfToken } }).catch(catchHandlers.emhttpd);
    // return request.get(url, options).catch(catchHandlers.emhttpd);
};
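A minimal usage sketch for `emcmd` (the command keys are invented for illustration; the set of keys emhttpd actually accepts is not part of this diff):

    // Hypothetical call: the commands object becomes query-string parameters on the
    // unix-socket request, with csrf_token appended automatically.
    await emcmd({ cmdSpindown: 'disk1' });
    // With DRY_RUN=true this only logs the URL and options instead of calling got.get().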
api/src/core/utils/files/config-file-normalizer.ts (Normal file, 103 lines added)
@@ -0,0 +1,103 @@
import { DynamicRemoteAccessType } from '@app/remoteAccess/types';
import {
    type SliceState as ConfigSliceState,
    initialState,
} from '@app/store/modules/config';
import { type RecursivePartial } from '@app/types';
import type {
    MyServersConfig,
    MyServersConfigMemory,
} from '@app/types/my-servers-config';
import { isEqual } from 'lodash';

export type ConfigType = 'flash' | 'memory';
type ConfigObject<T> = T extends 'flash'
    ? MyServersConfig
    : T extends 'memory'
        ? MyServersConfigMemory
        : never;

/**
 * @param config Config to read from to create a new formatted server config to write
 * @param mode 'flash' or 'memory'; changes which fields are included in the writeable payload
 * @returns The writeable config object for the requested mode
 */
// eslint-disable-next-line complexity
export const getWriteableConfig = <T extends ConfigType>(
    config: ConfigSliceState,
    mode: T
): ConfigObject<T> => {
    // Get current state
    const { api, local, notifier, remote, upc, connectionStatus } = config;

    // Create new state
    // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
    const newState: ConfigObject<T> = {
        api: {
            version: api.version ?? initialState.api.version,
            ...(api.extraOrigins ? { extraOrigins: api.extraOrigins } : {}),
        },
        local: {
            ...(local['2Fa'] === 'yes' ? { '2Fa': local['2Fa'] } : {}),
            ...(local.showT2Fa === 'yes' ? { showT2Fa: local.showT2Fa } : {}),
        },
        notifier: {
            apikey: notifier.apikey ?? initialState.notifier.apikey,
        },
        remote: {
            ...(remote['2Fa'] === 'yes' ? { '2Fa': remote['2Fa'] } : {}),
            wanaccess: remote.wanaccess ?? initialState.remote.wanaccess,
            wanport: remote.wanport ?? initialState.remote.wanport,
            ...(remote.upnpEnabled ? { upnpEnabled: remote.upnpEnabled } : {}),
            apikey: remote.apikey ?? initialState.remote.apikey,
            email: remote.email ?? initialState.remote.email,
            username: remote.username ?? initialState.remote.username,
            avatar: remote.avatar ?? initialState.remote.avatar,
            regWizTime: remote.regWizTime ?? initialState.remote.regWizTime,
            idtoken: remote.idtoken ?? initialState.remote.idtoken,
            accesstoken: remote.accesstoken ?? initialState.remote.accesstoken,
            refreshtoken: remote.refreshtoken ?? initialState.remote.refreshtoken,
            ...(mode === 'memory'
                ? {
                    allowedOrigins: remote.allowedOrigins ?? initialState.remote.allowedOrigins,
                }
                : {}),
            ...(remote.dynamicRemoteAccessType === DynamicRemoteAccessType.DISABLED
                ? {}
                : {
                    dynamicRemoteAccessType: remote.dynamicRemoteAccessType,
                }),
        },
        upc: {
            apikey: upc.apikey ?? initialState.upc.apikey,
        },
        ...(mode === 'memory'
            ? {
                connectionStatus: {
                    minigraph: connectionStatus.minigraph ?? initialState.connectionStatus.minigraph,
                    ...(connectionStatus.upnpStatus ? { upnpStatus: connectionStatus.upnpStatus } : {}),
                },
            }
            : {}),
    } as ConfigObject<T>;
    return newState;
};

/**
 * Compare a config file loaded from disk against the current state's normalized config.
 * This is used when loading config files, to ensure changes have actually been made before the state is merged.
 */
export const areConfigsEquivalent = (
    newConfigFile: RecursivePartial<MyServersConfig>,
    currentConfig: ConfigSliceState
): boolean =>
    // Enable to view config diffs: logger.debug(getDiff(getWriteableConfig(currentConfig, 'flash'), newConfigFile));
    isEqual(newConfigFile, getWriteableConfig(currentConfig, 'flash'));
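Sketch of how these two helpers might fit together when the flash config changes on disk (the call site and the parsed fragment are assumptions, not shown in this diff):

    // Assumed flow in a config file watcher:
    const parsedFromDisk = { remote: { wanaccess: 'yes' } }; // hypothetical parsed myservers.cfg fragment
    if (!areConfigsEquivalent(parsedFromDisk, store.getState().config)) {
        // getWriteableConfig(currentConfig, 'flash') describes what the file *should* contain,
        // so a mismatch means the on-disk file carries new values worth merging into the store.
    }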
api/src/core/utils/misc/parse-config.ts (Normal file, 145 lines added)
@@ -0,0 +1,145 @@
/*!
 * Copyright 2019-2022 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */
import { parse as parseIni } from 'ini';
import camelCaseKeys from 'camelcase-keys';
import { includeKeys } from 'filter-obj';
import mapObject from 'map-obj';
import { AppError } from '@app/core/errors/app-error';
import { accessSync, readFileSync } from 'fs';
import { access } from 'fs/promises';
import { F_OK } from 'constants';
import { extname } from 'path';

type ConfigType = 'ini' | 'cfg';

type OptionsWithPath = {
    /** Relative or absolute file path. */
    filePath: string;
    /** If the file is an "ini" or a "cfg". */
    type?: ConfigType;
};

type OptionsWithLoadedFile = {
    file: string;
    type: ConfigType;
};

/**
 * Converts the following
 * ```
 * {
 *   'ipaddr:0': '0.0.0.0',
 *   'ipaddr:1': '1.1.1.1'
 * }
 * ```
 * to this.
 * ```
 * {
 *   'ipaddr': ['0.0.0.0', '1.1.1.1']
 * }
 * ```
 */
const fixObjectArrays = (object: Record<string, any>) => {
    // An object of arrays for keys that end in `:${number}`
    const temporaryArrays = {};

    // An object without any array items
    const filteredObject = includeKeys(object, (key, value) => {
        // eslint-disable-next-line @typescript-eslint/prefer-regexp-exec
        const [, name, index] = [...((key).match(/(.*):(\d+$)/) ?? [])];
        if (!name || !index) {
            return true;
        }

        // Create initial array
        if (!Array.isArray(temporaryArrays[name])) {
            temporaryArrays[name] = [];
        }

        // Add value
        temporaryArrays[name].push(value);

        // Remove the old field
        return false;
    });

    return {
        ...filteredObject,
        ...temporaryArrays,
    };
};

export const fileExists = async (path: string) => access(path, F_OK).then(() => true).catch(() => false);
export const fileExistsSync = (path: string) => {
    try {
        accessSync(path, F_OK);
        return true;
    } catch (error: unknown) {
        return false;
    }
};

export const getExtensionFromPath = (filePath: string): string => extname(filePath);

const isFilePathOptions = (options: OptionsWithLoadedFile | OptionsWithPath): options is OptionsWithPath => Object.keys(options).includes('filePath');
const isFileOptions = (options: OptionsWithLoadedFile | OptionsWithPath): options is OptionsWithLoadedFile => Object.keys(options).includes('file');

export const loadFileFromPathSync = (filePath: string): string => {
    if (!fileExistsSync(filePath)) throw new Error(`Failed to load file at path: ${filePath}`);
    return readFileSync(filePath, 'utf-8').toString();
};

/**
 *
 * @param extension File extension
 * @returns boolean whether extension is ini or cfg
 */
const isValidConfigExtension = (extension: string): boolean => ['ini', 'cfg'].includes(extension);

export const parseConfig = <T extends Record<string, any>>(options: OptionsWithLoadedFile | OptionsWithPath): T => {
    let fileContents: string;
    let extension: string;

    if (isFilePathOptions(options)) {
        const { filePath, type } = options;

        const validFile = fileExistsSync(filePath);
        extension = type ?? getExtensionFromPath(filePath);
        const validExtension = isValidConfigExtension(extension);

        if (validFile && validExtension) {
            fileContents = loadFileFromPathSync(options.filePath);
        } else {
            throw new AppError(`Invalid File Path: ${options.filePath}, or Extension: ${extension}`);
        }
    } else if (isFileOptions(options)) {
        const { file, type } = options;
        fileContents = file;
        const extension = type;
        if (!isValidConfigExtension(extension)) {
            throw new AppError(`Invalid Extension for Ini File: ${extension}`);
        }
    } else {
        throw new AppError('Invalid Parameters Passed to ParseConfig');
    }

    const data: Record<string, any> = parseIni(fileContents);
    // Remove quotes around keys
    const dataWithoutQuoteKeys = mapObject(data, (key, value) =>
        // @SEE: https://stackoverflow.com/a/19156197/2311366
        [(key).replace(/^"(.+(?="$))"$/, '$1'), value],
    );

    // Result object with array items as actual arrays
    const result = Object.fromEntries(
        Object.entries(dataWithoutQuoteKeys)
            .map(([key, value]) => [key, typeof value === 'object' ? fixObjectArrays(value) : value]),
    );

    // Convert all keys to camel case
    return camelCaseKeys(result, {
        deep: true,
    }) as T;
};
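Hedged usage sketch for `parseConfig` (the ini content is invented; the call shape follows the options types above, and the `:N` folding only applies to keys inside a section because `fixObjectArrays` runs on object values):

    // Parsing an already-loaded ini string; keys are camel-cased and ':N' keys folded into arrays.
    const sample = '[eth0]\nIPADDR:0="10.0.0.2"\nIPADDR:1="10.0.0.3"\n';
    const parsed = parseConfig<Record<string, unknown>>({ file: sample, type: 'ini' });
    // Roughly: { eth0: { ipaddr: ['10.0.0.2', '10.0.0.3'] } }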
api/src/core/utils/shares/get-shares.ts (Normal file, 61 lines added)
@@ -0,0 +1,61 @@
/*!
 * Copyright 2019-2022 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */

import { processShare } from '@app/core/utils/shares/process-share';
import { AppError } from '@app/core/errors/app-error';
import { getters } from '@app/store';
import { type DiskShare, type UserShare } from '@app/core/types/states/share';

interface Filter {
    name: string;
}

type Overload = {
    (type: 'disk', filter?: Filter): DiskShare | null;
    (type: 'disks', filter?: Filter): DiskShare[];
    (type: 'user', filter?: Filter): UserShare | null;
    (type: 'users', filter?: Filter): UserShare[];
    (): { disks: DiskShare[]; users: UserShare[] };
};

/**
 * Get all share types.
 */
export const getShares: Overload = (type?: string, filter?: Filter) => {
    const emhttp = getters.emhttp();
    const types = {
        user(name: string) {
            // If a name was provided find a matching share otherwise return the first share
            const share = name ? emhttp.shares.find(share => share.name === name) : emhttp.shares[0];
            if (!share) return null;
            return processShare('user', share);
        },
        users: () => emhttp.shares.map(share => processShare('user', share)),
        disk(name: string) {
            const diskShares = emhttp.disks.filter(slot => slot.exportable && slot.name?.startsWith('disk'));

            // If a name was provided find a matching share otherwise return the first share
            const share = name ? diskShares.find(slot => slot.name === name) : diskShares[0];
            if (!share) return null;
            return processShare('disk', share);
        },
        disks: () => emhttp.disks.filter(slot => slot.exportable && slot.name?.startsWith('disk')).map(disk => processShare('disk', disk)),
    };

    // Return a type of share
    if (type) {
        if (!Object.keys(types).includes(type)) {
            throw new AppError(`Unknown type "${type}", valid types are ${Object.keys(types).join(', ')}.`);
        }

        return types[type](filter?.name);
    }

    // Return all shares
    return {
        users: types.users(),
        disks: types.disks(),
    };
};
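Illustrative calls showing how the `Overload` signature above narrows the return type (the share name is hypothetical; not part of the commit):

    const everything = getShares();                               // { disks: DiskShare[]; users: UserShare[] }
    const oneUserShare = getShares('user', { name: 'appdata' });  // UserShare | null
    const allDiskShares = getShares('disks');                     // DiskShare[]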
api/src/core/utils/shares/process-share.ts (Normal file, 50 lines added)
@@ -0,0 +1,50 @@
import { getters } from '@app/store';
import type { DiskShare, Share, UserShare } from '@app/core/types/states/share';

const processors = {
    user(share: Share) {
        const { cache: _, name, ...rest } = share;
        const { smbShares, nfsShares } = getters.emhttp();

        // Get each config for the share
        const { name: __, ...smb } = smbShares.find(share => share.name === name) ?? { name };
        const { name: ___, ...nfs } = nfsShares.find(share => share.name === name) ?? { name };

        return {
            name,
            type: 'user',
            smb,
            nfs,
            ...rest,
        };
    },
    disk(share: Slot) {
        const { smbShares, nfsShares, disks } = getters.emhttp();
        const { name } = share;
        const { name: __, ...smb } = smbShares.find(share => share.name === name) ?? { name };
        const { name: ___, ...nfs } = nfsShares.find(share => share.name === name) ?? { name };
        const { fsSize, fsFree } = disks.find(slot => slot.name === name) ?? {};

        return {
            name,
            type: 'disk',
            size: Number(fsSize),
            free: Number(fsFree),
            smb,
            nfs,
        };
    },
};

type Overload = {
    (type: 'disk', share: Slot): DiskShare;
    (type: 'user', share: Share): UserShare;
};

/**
 * Process share.
 */
export const processShare: Overload = (type: string, share: Share | Slot) => {
    const processor = processors[type];
    return processor(share);
};
api/src/environment.ts (Normal file, 15 lines added)
@@ -0,0 +1,15 @@
export const API_VERSION = process.env.VERSION ?? 'THIS_WILL_BE_REPLACED_WHEN_BUILT';
export const NODE_ENV = process.env.NODE_ENV as 'development' | 'test' | 'staging' | 'production';
export const environment = {
    IS_MAIN_PROCESS: false,
};
export const CHOKIDAR_USEPOLLING = process.env.CHOKIDAR_USEPOLLING === 'true';
export const IS_DOCKER = process.env.IS_DOCKER === 'true';
export const DEBUG = process.env.DEBUG === 'true';
export const INTROSPECTION = process.env.INTROSPECTION === 'true';
export const ENVIRONMENT = process.env.ENVIRONMENT as 'production' | 'staging' | 'development';
export const GRAPHQL_INTROSPECTION = Boolean(
    INTROSPECTION ?? DEBUG ?? ENVIRONMENT !== 'production'
);
export const PORT = process.env.PORT ?? '/var/run/unraid-api.sock';
export const DRY_RUN = process.env.DRY_RUN === 'true';
@@ -2,7 +2,7 @@ import { apiKeyToUser } from "@app/graphql/index";
 import { getters } from "@app/store/index";
 import { execa } from "execa";
 import { type Response, type Request} from 'express';
-import { stat, rm } from "fs/promises";
+import { stat } from "fs/promises";
 import path from "path";

 export const getLogs = async (req: Request, res: Response) => {
api/src/graphql/func-directive.ts (Normal file, 94 lines added)
@@ -0,0 +1,94 @@
import get from 'lodash/get';
import * as core from '@app/core';
import { graphqlLogger } from '@app/core/log';
import { mapSchema, getDirective, MapperKind } from '@graphql-tools/utils';
import { getCoreModule } from '@app/graphql/index';
import type { GraphQLFieldResolver, GraphQLSchema } from 'graphql';
import type { User } from '@app/core/types/states/user';

interface FuncDirective {
    module: string;
    data: object;
    query: any;
    extractFromResponse: string;
}

const funcDirectiveResolver: (directiveArgs: FuncDirective) => GraphQLFieldResolver<undefined, { user?: User }, { result?: any }> | undefined = ({
    module: coreModule,
    data,
    query,
    extractFromResponse,
}) => async (_, args, context) => {
    const func = getCoreModule(coreModule);

    const functionContext = {
        query,
        data,
        user: context.user,
    };

    // Run function
    const [error, coreMethodResult] = await Promise.resolve(func(functionContext, core))
        .then(result => [undefined, result])
        .catch(error_ => {
            // Ensure we aren't leaking anything in production
            if (process.env.NODE_ENV === 'production') {
                graphqlLogger.error('Module:', coreModule, 'Error:', error_.message);
                return [new Error(error_.message)];
            }

            return [error_];
        });

    // Bail if we can't get the method to run
    if (error) {
        return error;
    }

    // Get wanted result type or fallback to json
    const result = coreMethodResult[args.result || 'json'];

    // Allow fields to be extracted
    if (extractFromResponse) {
        return get(result, extractFromResponse);
    }

    return result;
};

/**
 * Get the func directive - this is used to resolve @func directives in the graphql schema
 * @returns Type definition and schema interceptor to create resolvers for @func directives
 */
export function getFuncDirective() {
    const directiveName = 'func';
    return {
        funcDirectiveTypeDefs: /* GraphQL */`
            directive @func(
                module: String!
                data: JSON
                query: JSON
                result: String
                extractFromResponse: String
            ) on FIELD_DEFINITION
        `,
        funcDirectiveTransformer: (schema: GraphQLSchema): GraphQLSchema => mapSchema(schema, {
            [MapperKind.MUTATION_ROOT_FIELD](fieldConfig) {
                const funcDirective = getDirective(schema, fieldConfig, directiveName)?.[0] as FuncDirective | undefined;
                if (funcDirective?.module) {
                    fieldConfig.resolve = funcDirectiveResolver(funcDirective);
                }

                return fieldConfig;
            },
            [MapperKind.QUERY_ROOT_FIELD](fieldConfig) {
                const funcDirective = getDirective(schema, fieldConfig, directiveName)?.[0] as FuncDirective | undefined;
                if (funcDirective?.module) {
                    fieldConfig.resolve = funcDirectiveResolver(funcDirective);
                }

                return fieldConfig;
            },
        }),
    };
}
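A hedged sketch of how a root field might opt into this directive (the field and module names are made up for illustration):

    // Hypothetical schema snippet (as it might appear in a .graphql type definition):
    //   type Query {
    //       services: JSON @func(module: "get-services", result: "json")
    //   }
    // The transformer above would then attach funcDirectiveResolver({ module: 'get-services', ... })
    // as the resolver for Query.services, returning the core module's `json` result.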
api/src/graphql/resolvers/query/index.ts (Normal file, 42 lines added)
@@ -0,0 +1,42 @@
/*!
 * Copyright 2019-2022 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */
import { getArray } from '@app/core/modules/get-array';
import { getDockerContainers } from '@app/core/modules/index';
import { type QueryResolvers } from '@app/graphql/generated/api/types';
import cloud from '@app/graphql/resolvers/query/cloud';
import config from '@app/graphql/resolvers/query/config';
import crashReportingEnabled from '@app/graphql/resolvers/query/crash-reporting-enabled';
import { disksResolver } from '@app/graphql/resolvers/query/disks';
import display from '@app/graphql/resolvers/query/display';
import flash from '@app/graphql/resolvers/query/flash';
import online from '@app/graphql/resolvers/query/online';
import owner from '@app/graphql/resolvers/query/owner';
import { registration } from '@app/graphql/resolvers/query/registration';
import server from '@app/graphql/resolvers/query/server';
import { servers } from '@app/graphql/resolvers/query/servers';
import twoFactor from '@app/graphql/resolvers/query/two-factor';
import { vmsResolver } from '@app/graphql/resolvers/query/vms';

export const Query: QueryResolvers = {
    array: getArray,
    cloud,
    config,
    crashReportingEnabled,
    disks: disksResolver,
    dockerContainers: getDockerContainers,
    display,
    flash,
    online,
    owner,
    registration,
    server,
    servers,
    twoFactor,
    vms: vmsResolver,
    info() {
        // Returns an empty object because the subfield resolvers live at the root (allows for partial fetching)
        return {};
    },
};
api/src/graphql/resolvers/query/registration.ts (Normal file, 40 lines added)
@@ -0,0 +1,40 @@
/*!
 * Copyright 2021 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */

import { getKeyFile } from '@app/core/utils/misc/get-key-file';
import { ensurePermission } from '@app/core/utils/permissions/ensure-permission';
import { type Registration, type QueryResolvers } from '@app/graphql/generated/api/types';
import { getters } from '@app/store';
import { FileLoadStatus } from '@app/store/types';

export const registration: QueryResolvers['registration'] = async (_, __, context) => {
    ensurePermission(context.user, {
        resource: 'registration',
        action: 'read',
        possession: 'any',
    });

    const emhttp = getters.emhttp();
    if (emhttp.status !== FileLoadStatus.LOADED || !emhttp.var?.regTy) {
        return null;
    }

    const isTrial = emhttp.var.regTy?.toLowerCase() === 'trial';
    const isExpired = emhttp.var.regTy.includes('expired');

    const registration: Registration = {
        guid: emhttp.var.regGuid,
        type: emhttp.var.regTy,
        state: emhttp.var.regState,
        // Based on https://github.com/unraid/dynamix.unraid.net/blob/c565217fa8b2acf23943dc5c22a12d526cdf70a1/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/include/state.php#L64
        expiration: (1_000 * (isTrial || isExpired ? Number(emhttp.var.regTm2) : 0)).toString(),
        keyFile: {
            location: emhttp.var.regFile,
            contents: await getKeyFile(),
        },
    };
    return registration;
};
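Worked example of the `expiration` arithmetic above, with an invented timestamp. For trial or expired keys, `regTm2` appears to hold an expiry time in epoch seconds, so multiplying by 1,000 converts it to epoch milliseconds; for other key types the field is reported as "0":

    // Hypothetical trial key: regTm2 = '1700000000' (epoch seconds).
    // expiration = (1_000 * Number('1700000000')).toString() === '1700000000000' (epoch ms).
    // Non-trial, non-expired key: expiration = (1_000 * 0).toString() === '0'.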
api/src/graphql/resolvers/subscription/dashboard.ts (Normal file, 93 lines added)
@@ -0,0 +1,93 @@
import { dashboardLogger } from '@app/core/log';
import { generateData } from '@app/common/dashboard/generate-data';
import { pubsub } from '@app/core/pubsub';
import { getters, store } from '@app/store';
import { saveDataPacket } from '@app/store/modules/dashboard';
import { isEqual } from 'lodash';
import { GraphQLClient } from '@app/mothership/graphql-client';
import { SEND_DASHBOARD_PAYLOAD_MUTATION } from '../../mothership/mutations';
import { type DashboardInput } from '../../generated/client/graphql';
import { getDiff } from 'json-difference';
import { DEBUG } from '@app/environment';
import { isApolloError } from '@apollo/client/core';

const isNumberBetween = (min: number, max: number) => (num: number) => num > min && num < max;

const logAndReturn = <T>(returnValue: T, logLevel: 'info' | 'debug' | 'trace' | 'error', logLine: string, ...logParams: unknown[]): T => {
    dashboardLogger[logLevel](logLine, ...logParams);
    return returnValue;
};

const ONE_MB = 1_024 * 1_024;
const ONE_HUNDRED_MB = 100 * ONE_MB;

const canSendDataPacket = (dataPacket: DashboardInput | null) => {
    const { lastDataPacketTimestamp, lastDataPacket } = getters.dashboard();
    // Const { lastDataPacketTimestamp, lastDataPacketString, lastDataPacket } = dashboardStore;
    if (!dataPacket) return logAndReturn(false, 'error', 'Not sending update to dashboard because the data packet is empty');

    // UPDATE - No data packet has been sent since boot
    if (!lastDataPacketTimestamp) return logAndReturn(true, 'debug', 'Sending update as none have been sent since the API started');

    // NO_UPDATE - This is an exact copy of the last data packet
    if (isEqual(dataPacket, lastDataPacket)) return logAndReturn(false, 'trace', '[NETWORK] Skipping Update');

    if (!lastDataPacket) return logAndReturn(true, 'debug', 'Sending update as no data packets have been stored in state yet');

    const difference = getDiff(lastDataPacket, dataPacket);

    const oldBytesFree = lastDataPacket.array?.capacity.bytes?.free;
    const newBytesFree = dataPacket.array?.capacity.bytes?.free;

    if (oldBytesFree && newBytesFree && difference.added.length === 0 && difference.removed.length === 0 && difference.edited.length === 2) {
        // If size has changed less than 100 MB (and nothing else has changed), don't send an update
        const numberBetweenCheck = isNumberBetween((Number(oldBytesFree) * ONE_MB) - ONE_HUNDRED_MB, (Number(oldBytesFree) * ONE_MB) + ONE_HUNDRED_MB);
        if (numberBetweenCheck(Number(newBytesFree) * ONE_MB)) {
            logAndReturn(false, 'info', 'Size has not changed enough to send a new dashboard payload');
        }
    }

    return logAndReturn(true, 'trace', 'Sending update because the packets are not equal');
};

export const publishToDashboard = async () => {
    try {
        const dataPacket = await generateData();
        // Only update data on change
        if (!canSendDataPacket(dataPacket)) return;

        dashboardLogger.debug('New Data Packet Is: %o', dataPacket);

        // Save new data packet
        store.dispatch(saveDataPacket({ lastDataPacket: dataPacket }));

        // Publish the updated data
        dashboardLogger.addContext('update', dataPacket);
        dashboardLogger.trace('Publishing update');
        dashboardLogger.removeContext('update');

        // Update local clients
        await pubsub.publish('dashboard', {
            dashboard: dataPacket,
        });
        if (dataPacket) {
            const client = GraphQLClient.getInstance();
            if (!client) {
                throw new Error('Invalid Client');
            }

            // Update mothership
            await client.mutate({ mutation: SEND_DASHBOARD_PAYLOAD_MUTATION, variables: { apiKey: getters.config().remote.apikey, data: dataPacket } });
        } else {
            dashboardLogger.error('DataPacket Was Empty');
        }
    } catch (error: unknown) {
        if (error instanceof Error && isApolloError(error)) {
            dashboardLogger.error('Failed publishing with GQL Errors: %s, \nClient Errors: %s', error.graphQLErrors.map(error => error.message).join(','), error.clientErrors.join(', '));
        }

        if (DEBUG) dashboardLogger.error(error);
    }
};
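For reference, the range helper at the top of this file composes like this (values invented):

    // isNumberBetween builds an exclusive range check:
    const within = isNumberBetween(900, 1_100);
    within(1_000); // true  (strictly between the bounds)
    within(1_100); // false (the bounds themselves are excluded)
    // ONE_HUNDRED_MB === 100 * 1_024 * 1_024 === 104_857_600 bytes, the "ignore small changes" window used above.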
api/src/graphql/resolvers/subscription/network.ts (Normal file, 317 lines added)
@@ -0,0 +1,317 @@
import { GraphQLClient } from '@app/mothership/graphql-client';
import { type Nginx } from '@app/core/types/states/nginx';
import { type RootState, store, getters } from '@app/store';
import { type NetworkInput, URL_TYPE, type AccessUrlInput } from '@app/graphql/generated/client/graphql';
import { dashboardLogger, logger } from '@app/core';
import { isEqual } from 'lodash';
import { SEND_NETWORK_MUTATION } from '@app/graphql/mothership/mutations';
import { saveNetworkPacket } from '@app/store/modules/dashboard';
import { ApolloError } from '@apollo/client/core/core.cjs';
import { AccessUrlInputSchema, NetworkInputSchema } from '@app/graphql/generate/validators';
import { ZodError } from 'zod';

interface UrlForFieldInput {
    url: string;
    port?: number;
    portSsl?: number;
}

interface UrlForFieldInputSecure extends UrlForFieldInput {
    url: string;
    portSsl: number;
}
interface UrlForFieldInputInsecure extends UrlForFieldInput {
    url: string;
    port: number;
}

export const getUrlForField = ({ url, port, portSsl }: UrlForFieldInputInsecure | UrlForFieldInputSecure) => {
    let portToUse = '';
    let httpMode = 'https://';

    if (!url || url === '') {
        throw new Error('No URL Provided');
    }

    if (port) {
        portToUse = port === 80 ? '' : `:${port}`;
        httpMode = 'http://';
    } else if (portSsl) {
        portToUse = portSsl === 443 ? '' : `:${portSsl}`;
        httpMode = 'https://';
    } else {
        throw new Error(`No ports specified for URL: ${url}`);
    }

    const urlString = `${httpMode}${url}${portToUse}`;

    try {
        return new URL(urlString);
    } catch (error: unknown) {
        throw new Error(`Failed to parse URL: ${urlString}`);
    }
};

const fieldIsFqdn = (field: keyof Nginx) => field?.toLowerCase().includes('fqdn');

export type NginxUrlFields = Extract<keyof Nginx, 'lanIp' | 'lanIp6' | 'lanName' | 'lanMdns' | 'lanFqdn' | 'lanFqdn6' | 'wanFqdn' | 'wanFqdn6'>;

/**
 * @param nginx Nginx Config File
 * @param field The field to build the URL from
 * @returns a URL, created from the combination of inputs
 * @throws Error when the URL cannot be created or the URL is invalid
 */
export const getUrlForServer = ({ nginx, field }: { nginx: Nginx; field: NginxUrlFields }): URL => {
    if (nginx[field]) {
        if (fieldIsFqdn(field)) {
            return getUrlForField({ url: nginx[field], portSsl: nginx.httpsPort });
        }

        if (!nginx.sslEnabled) { // Use SSL = no
            return getUrlForField({ url: nginx[field], port: nginx.httpPort });
        }

        if (nginx.sslMode === 'yes') {
            return getUrlForField({ url: nginx[field], portSsl: nginx.httpsPort });
        }

        if (nginx.sslMode === 'auto') {
            throw new Error(`Cannot get IP Based URL for field: "${field}" SSL mode auto`);
        }
    }

    throw new Error(`IP URL Resolver: Could not resolve any access URL for field: "${field}", is FQDN?: ${fieldIsFqdn(field)}`);
};

// eslint-disable-next-line complexity
export const getServerIps = (state: RootState = store.getState()): { urls: AccessUrlInput[]; errors: Error[] } => {
    const { nginx } = state.emhttp;
    const { remote: { wanport } } = state.config;
    if (!nginx || Object.keys(nginx).length === 0) {
        return { urls: [], errors: [new Error('Nginx Not Loaded')] };
    }

    const errors: Error[] = [];
    const urls: AccessUrlInput[] = [];

    try {
        // Default URL
        const defaultUrl = new URL(nginx.defaultUrl);
        urls.push({
            name: 'Default',
            type: URL_TYPE.DEFAULT,
            ipv4: defaultUrl,
            ipv6: defaultUrl,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan IP URL
        const lanIp4Url = getUrlForServer({ nginx, field: 'lanIp' });
        urls.push({
            name: 'LAN IPv4',
            type: URL_TYPE.LAN,
            ipv4: lanIp4Url,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan IP6 URL
        const lanIp6Url = getUrlForServer({ nginx, field: 'lanIp6' });
        urls.push({
            name: 'LAN IPv6',
            type: URL_TYPE.LAN,
            ipv4: lanIp6Url,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan Name URL
        const lanNameUrl = getUrlForServer({ nginx, field: 'lanName' });
        urls.push({
            name: 'LAN Name',
            type: URL_TYPE.MDNS,
            ipv4: lanNameUrl,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan MDNS URL
        const lanMdnsUrl = getUrlForServer({ nginx, field: 'lanMdns' });
        urls.push({
            name: 'LAN MDNS',
            type: URL_TYPE.MDNS,
            ipv4: lanMdnsUrl,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan FQDN URL
        const lanFqdnUrl = getUrlForServer({ nginx, field: 'lanFqdn' });
        urls.push({
            name: 'LAN FQDN',
            type: URL_TYPE.LAN,
            ipv4: lanFqdnUrl,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // Lan FQDN6 URL
        const lanFqdn6Url = getUrlForServer({ nginx, field: 'lanFqdn6' });
        urls.push({
            name: 'LAN FQDNv6',
            type: URL_TYPE.LAN,
            ipv6: lanFqdn6Url,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // WAN FQDN URL
        const wanFqdnUrl = getUrlForField({ url: nginx.wanFqdn, portSsl: Number(wanport || 443) });
        urls.push({
            name: 'WAN FQDN',
            type: URL_TYPE.WAN,
            ipv4: wanFqdnUrl,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    try {
        // WAN FQDN6 URL
        const wanFqdn6Url = getUrlForField({ url: nginx.wanFqdn6, portSsl: Number(wanport) });
        urls.push({
            name: 'WAN FQDNv6',
            type: URL_TYPE.WAN,
            ipv6: wanFqdn6Url,
        });
    } catch (error: unknown) {
        if (error instanceof Error) {
            errors.push(error);
        } else {
            logger.warn('Uncaught error in network resolver', error);
        }
    }

    for (const wgFqdn of nginx.wgFqdns) {
        try {
            // WG FQDN URL
            const wgFqdnUrl = getUrlForField({ url: wgFqdn.fqdn, portSsl: nginx.httpsPort });
            urls.push({
                name: `WG FQDN ${wgFqdn.id}`,
                type: URL_TYPE.WIREGUARD,
                ipv4: wgFqdnUrl,
            });
        } catch (error: unknown) {
            if (error instanceof Error) {
                errors.push(error);
            } else {
                logger.warn('Uncaught error in network resolver', error);
            }
        }
    }

    const safeUrls = urls.map((url) => AccessUrlInputSchema().safeParse(url)).reduce<AccessUrlInput[]>((acc, curr) => {
        if (curr.success) {
            acc.push(curr.data);
        } else {
            errors.push(curr.error);
        }

        return acc;
    }, []);

    return { urls: safeUrls, errors };
};

export const publishNetwork = async () => {
    try {
        const client = GraphQLClient.getInstance();

        const datapacket = getServerIps();
        if (datapacket.errors) {
            const zodErrors = datapacket.errors.filter(error => error instanceof ZodError);
            if (zodErrors.length) {
                dashboardLogger.warn('Validation Errors Encountered with Network Payload: %s', zodErrors.map(error => error.message).join(','));
            }
        }

        const networkPacket: NetworkInput = { accessUrls: datapacket.urls };
        const validatedNetwork = NetworkInputSchema().parse(networkPacket);

        const { lastNetworkPacket } = getters.dashboard();
        const { apikey: apiKey } = getters.config().remote;
        if (isEqual(JSON.stringify(lastNetworkPacket), JSON.stringify(validatedNetwork))) {
            dashboardLogger.trace('[DASHBOARD] Skipping Update');
        } else if (client) {
            dashboardLogger.addContext('data', validatedNetwork);
            dashboardLogger.info('Sending data packet for network');
            dashboardLogger.removeContext('data');
            const result = await client.mutate({
                mutation: SEND_NETWORK_MUTATION,
                variables: {
                    apiKey,
                    data: validatedNetwork,
                },
            });
            dashboardLogger.addContext('sendNetworkResult', result);
            dashboardLogger.debug('Sent network mutation with %s urls', datapacket.urls.length);
            dashboardLogger.removeContext('sendNetworkResult');
            store.dispatch(saveNetworkPacket({ lastNetworkPacket: validatedNetwork }));
        }
    } catch (error: unknown) {
        dashboardLogger.trace('ERROR', error);
        if (error instanceof ApolloError) {
            dashboardLogger.error('Failed publishing with GQL Errors: %s, \nClient Errors: %s', error.graphQLErrors.map(error => error.message).join(','), error.clientErrors.join(', '));
        } else {
            dashboardLogger.error(error);
        }
    }
};
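Illustrative results of `getUrlForField`, using invented hostnames and following the port rules above (default ports 80/443 are dropped from the rendered URL):

    getUrlForField({ url: 'tower.local', port: 80 });       // -> URL for http://tower.local/
    getUrlForField({ url: 'tower.local', portSsl: 8443 });  // -> URL for https://tower.local:8443/
    getUrlForField({ url: '', portSsl: 443 });              // throws 'No URL Provided'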
api/src/index.ts (Normal file, 105 lines added)
@@ -0,0 +1,105 @@
/*!
 * Copyright 2019-2022 Lime Technology Inc. All rights reserved.
 * Written by: Alexis Tyler
 */
import 'reflect-metadata';
import { am } from 'am';
import http from 'http';
import https from 'https';
import CacheableLookup from 'cacheable-lookup';
import exitHook from 'async-exit-hook';
import { store } from '@app/store';
import { loadConfigFile } from '@app/store/modules/config';
import { logger } from '@app/core/log';
import { startStoreSync } from '@app/store/store-sync';
import { loadStateFiles } from '@app/store/modules/emhttp';
import { StateManager } from '@app/store/watch/state-watch';
import { setupRegistrationKeyWatch } from '@app/store/watch/registration-watch';
import { loadRegistrationKey } from '@app/store/modules/registration';
import { createApolloExpressServer } from '@app/server';
import { unlinkSync } from 'fs';
import { fileExistsSync } from '@app/core/utils/files/file-exists';
import { setupDockerWatch } from '@app/store/watch/docker-watch';
import { PORT, environment } from '@app/environment';
import { shutdownApiEvent } from '@app/store/actions/shutdown-api-event';
import { PingTimeoutJobs } from '@app/mothership/jobs/ping-timeout-jobs';
import { type BaseContext, type ApolloServer } from '@apollo/server';

let server: ApolloServer<BaseContext>;

const unlinkUnixPort = () => {
    if (isNaN(parseInt(PORT, 10))) {
        if (fileExistsSync(PORT)) unlinkSync(PORT);
    }
};
// Boot app
void am(
    async () => {
        environment.IS_MAIN_PROCESS = true;
        const cacheable = new CacheableLookup();

        Object.assign(global, { WebSocket: require('ws') });
        // Ensure all DNS lookups are cached for their TTL
        cacheable.install(http.globalAgent);
        cacheable.install(https.globalAgent);

        // Start file <-> store sync
        // Must occur before config is loaded to ensure that the handler can fix broken configs
        await startStoreSync();

        // Load my servers config file into store
        await store.dispatch(loadConfigFile());

        // Load emhttp state into store
        await store.dispatch(loadStateFiles());

        // Load initial registration key into store
        await store.dispatch(loadRegistrationKey());

        // Start listening to file updates
        StateManager.getInstance();

        // Start listening to key file changes
        setupRegistrationKeyWatch();

        // Start listening to docker events
        setupDockerWatch();

        // Try and load the HTTP server
        logger.debug('Starting HTTP server');

        // Disabled until we need the access token to work
        // TokenRefresh.init();

        // If port is unix socket, delete old socket before starting http server
        unlinkUnixPort();

        // Start webserver
        server = await createApolloExpressServer();

        PingTimeoutJobs.init();

        // On process exit stop HTTP server - this says it supports async but it doesn't seem to
        exitHook(() => {
            // If port is unix socket, delete socket before exiting
            unlinkUnixPort();

            shutdownApiEvent();
            process.exitCode = 0;
        });
    },
    async (error: NodeJS.ErrnoException) => {
        // Log error to syslog
        logger.error('API-GLOBAL-ERROR', error);
        shutdownApiEvent();

        // Stop server
        logger.debug('Stopping HTTP server');
        if (server) {
            await server.stop();
        }

        // Kill application
        process.exitCode = 1;
    }
);
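Editor's note: unlinkUnixPort above relies on PORT being either a numeric TCP port or a unix socket path. A minimal sketch of that decision follows; the helper names and the socket path are illustrative, not from this commit, and plain node fs calls stand in for the project's fileExistsSync.

import { existsSync, unlinkSync } from 'fs';

// Sketch only: a PORT value that does not parse as a number is assumed to be
// a unix socket path, so any stale socket file is removed before listening.
const isUnixSocketPort = (port: string): boolean => isNaN(parseInt(port, 10));

const cleanStaleSocket = (port: string): void => {
    if (isUnixSocketPort(port) && existsSync(port)) {
        unlinkSync(port); // e.g. '/var/run/unraid-api.sock' (illustrative path)
    }
};

cleanStaleSocket('/var/run/unraid-api.sock'); // removes a leftover socket file
cleanStaleSocket('3001'); // numeric port: nothing to clean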
69
api/src/originMiddleware.ts
Normal file
@@ -0,0 +1,69 @@
import { type NextFunction, type Request, type Response } from 'express';
import { logger } from '@app/core';
import { getAllowedOrigins } from '@app/common/allowed-origins';

const getOriginGraphqlError = () => ({
    data: null,
    errors: [
        {
            message:
                'The CORS policy for this site does not allow access from the specified Origin.',
        },
    ],
});

/**
 * Middleware to check a user's origin and send a GraphQL error if they are not using a valid one
 * @param req Express Request
 * @param res Express Response
 * @param next Express NextFunction
 * @returns void
 */
export const originMiddleware = (
    req: Request,
    res: Response,
    next: NextFunction
): void => {
    // Dev Mode Bypass
    const origin = req.get('Origin')?.toLowerCase() ?? '';
    const allowedOrigins = getAllowedOrigins();

    if (process.env.BYPASS_CORS_CHECKS === 'true') {
        logger.addContext('cors', allowedOrigins);
        logger.warn(`BYPASSING_CORS_CHECK: %o`, req.headers);
        logger.removeContext('cors');
        next();
        return;
    } else {
        logger.addContext('origins', allowedOrigins.join(', '))
        logger.trace(
            `Current Origin: ${
                origin ?? 'undefined'
            }`
        );
        logger.removeContext('origins')
    }

    // Disallow requests with no origin
    // (like mobile apps, curl requests or viewing /graphql directly)
    if (!origin) {
        logger.debug('No origin provided, denying CORS!');
        res.status(403).send(getOriginGraphqlError());
        return;
    }

    logger.trace(`📒 Checking "${origin}" for CORS access.`);

    // Only allow known origins
    if (!allowedOrigins.includes(origin)) {
        logger.error(
            '❌ %s is not in the allowed origins list, denying CORS!',
            origin
        );
        res.status(403).send(getOriginGraphqlError());
        return;
    }

    logger.trace('✔️ Origin check passed, granting CORS!');
    next();
};
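Editor's note: a middleware with this signature is typically mounted ahead of the GraphQL route so disallowed origins receive the 403 GraphQL-style error above. A minimal sketch follows; the app setup and placeholder handler are assumptions (they rely on the project's @app path alias and are not part of this diff).

import express from 'express';
import { originMiddleware } from '@app/originMiddleware';

// Sketch only: run the origin check before any GraphQL handling.
const app = express();
app.use(originMiddleware);
app.post('/graphql', (_req, res) => {
    res.json({ data: { online: true } }); // placeholder handler for illustration
});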
@@ -11,7 +11,7 @@ import http from 'http';
import { ApolloServer } from '@apollo/server';
import { expressMiddleware } from '@apollo/server/express4';
import { ApolloServerPluginDrainHttpServer } from '@apollo/server/plugin/drainHttpServer';
-import { logger, config, pubsub, graphqlLogger } from '@app/core';
+import { logger, pubsub, graphqlLogger } from '@app/core';
import { verifyTwoFactorToken } from '@app/common/two-factor';
import display from '@app/graphql/resolvers/query/display';
import { getters } from '@app/store';
@@ -23,7 +23,7 @@ import { apiKeyToUser } from '@app/graphql';
import { randomUUID } from 'crypto';
import { getServerAddress } from '@app/common/get-server-address';
import { originMiddleware } from '@app/originMiddleware';
-import { API_VERSION, GRAPHQL_INTROSPECTION } from '@app/environment';
+import { API_VERSION, GRAPHQL_INTROSPECTION, PORT } from '@app/environment';
import {
    getBannerPathIfPresent,
    getCasePathIfPresent,
@@ -354,6 +354,6 @@ export const createApolloExpressServer = async () => {
        }
    );

-    httpServer.listen(config.port);
+    httpServer.listen(PORT);
    return apolloServer;
};
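Editor's note: the listen change above works because Node's http.Server#listen accepts a numeric port, a numeric string, or a unix socket path, so the single PORT value covers both listening modes. A minimal sketch under that assumption (values are illustrative, not from this diff):

import http from 'http';

// Sketch only: PORT may be '3001' (TCP port) or '/var/run/unraid-api.sock'
// (socket path); listen() accepts either form directly.
const server = http.createServer((_req, res) => res.end('ok'));

const PORT = process.env.PORT ?? '3001'; // illustrative default
server.listen(PORT, () => {
    console.log(`API listening on ${PORT}`);
});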
98
api/src/store/modules/remote-graphql.ts
Normal file
@@ -0,0 +1,98 @@
import { setGraphqlConnectionStatus } from '@app/store/actions/set-minigraph-status';
import { logoutUser } from '@app/store/modules/config';
import { type PayloadAction, createSlice, isAnyOf } from '@reduxjs/toolkit';
import {
    MOTHERSHIP_CRITICAL_STATUSES,
    type SubscriptionWithLastPing,
} from '@app/store/types';
import { remoteAccessLogger } from '@app/core/log';
import { addRemoteSubscription } from '@app/store/actions/add-remote-subscription';

interface RemoteGraphQLStore {
    subscriptions: Array<SubscriptionWithLastPing>;
}

const initialState: RemoteGraphQLStore = {
    subscriptions: [],
};

const remoteGraphQLStore = createSlice({
    name: 'remoteGraphQL',
    initialState,
    reducers: {
        clearSubscription(state, action: PayloadAction<string>) {
            remoteAccessLogger.debug(
                'Clearing subscription with SHA %s',
                action.payload
            );
            const subscription = state.subscriptions.find(
                (sub) => sub.sha256 === action.payload
            );
            if (subscription) {
                subscription.subscription.unsubscribe();
                state.subscriptions = state.subscriptions.filter(
                    (subscription) => subscription.sha256 !== action.payload
                );
            }

            remoteAccessLogger.debug(
                'Current remote subscriptions: %s',
                state.subscriptions.length
            );
        },
        renewRemoteSubscription(
            state,
            { payload: { sha256 } }: PayloadAction<{ sha256: string }>
        ) {
            const subscription = state.subscriptions.find(
                (sub) => sub.sha256 === sha256
            );
            if (subscription) {
                subscription.lastPing = Date.now();
            }
        },
    },
    extraReducers(builder) {
        builder.addCase(addRemoteSubscription.rejected, (_, action) => {
            if (action.error) {
                remoteAccessLogger.warn(
                    'Handling Add Remote Sub Error: %s',
                    action.error.message
                );
            }
        });
        builder.addCase(addRemoteSubscription.fulfilled, (state, action) => {
            remoteAccessLogger.info(
                'Successfully added new remote subscription'
            );
            state.subscriptions.push({
                ...action.payload,
                lastPing: Date.now(),
            });
        }),
        builder.addMatcher(
            isAnyOf(logoutUser.pending, setGraphqlConnectionStatus),
            (state, action) => {
                if (
                    (action.payload?.status &&
                        MOTHERSHIP_CRITICAL_STATUSES.includes(
                            action.payload.status
                        )) ||
                    action.type === logoutUser.pending.type
                ) {
                    remoteAccessLogger.debug(
                        'Clearing all active remote subscriptions, minigraph is no longer connected.'
                    );
                    for (const sub of state.subscriptions) {
                        sub.subscription.unsubscribe();
                    }
                    state.subscriptions = [];
                }
            }
        );
    },
});

export const { clearSubscription, renewRemoteSubscription } =
    remoteGraphQLStore.actions;
export const remoteGraphQLReducer = remoteGraphQLStore.reducer;
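Editor's note: to show how this slice is exercised, here is a minimal sketch wiring the exported reducer and actions into a throwaway store. The store setup and the sha256 value are illustrative and not part of this commit.

import { configureStore } from '@reduxjs/toolkit';
import {
    clearSubscription,
    renewRemoteSubscription,
    remoteGraphQLReducer,
} from '@app/store/modules/remote-graphql';

// Sketch only: a standalone store to exercise the action surface of the slice.
const store = configureStore({
    reducer: { remoteGraphQL: remoteGraphQLReducer },
});

// Refresh the lastPing of a tracked subscription (sha256 value is illustrative).
store.dispatch(renewRemoteSubscription({ sha256: 'abc123' }));

// Unsubscribe and drop it again.
store.dispatch(clearSubscription('abc123'));

console.log(store.getState().remoteGraphQL.subscriptions.length);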
322
api/src/store/state-parsers/var.ts
Normal file
@@ -0,0 +1,322 @@
import { type IniStringBoolean, type IniStringBooleanOrAuto } from '@app/core/types/ini';
import { type FsType } from '@app/core/types/states/var';
import { toNumber } from '@app/core/utils';
import { ArrayState, RegistrationState, type registrationType } from '@app/graphql/generated/api/types';
import type { StateFileToIniParserMap } from '@app/store/types';

/**
 * Unraid registration check
 */
type RegistrationCheck =
    /** Key file is missing. */
    'ENOKEYFILE2' |
    /** Everything is fine. */
    '';

/**
 * Unraid registration type
 *
 * Check the {@link https://unraid.net/pricing | pricing page} for up to date info.
 */
type RegistrationType =
    /** Missing key file. */
    '- missing key file' |
    /** Free trial */
    'Trial' |
    /** Up to 6 attached storage devices. */
    'Basic' |
    /** Up to 12 attached storage devices. */
    'Plus' |
    /** Unlimited attached storage devices. */
    'Pro';

type RegistrationState =
    'TRIAL' |
    'BASIC' |
    'PLUS' |
    'PRO' |
    'EEXPIRED' |
    'EGUID' |
    'EGUID1' |
    'ETRIAL' |
    'ENOKEYFILE' |
    'ENOKEYFILE1' |
    'ENOKEYFILE2' |
    'ENOFLASH1' |
    'ENOFLASH2' |
    'ENOFLASH3' |
    'ENOFLASH4' |
    'ENOFLASH5' |
    'ENOFLASH6' |
    'ENOFLASH7' |
    'EBLACKLISTED' |
    'EBLACKLISTED1' |
    'EBLACKLISTED2' |
    'ENOCONN';

export type VarIni = {
    bindMgt: IniStringBooleanOrAuto;
    cacheNumDevices: string;
    cacheSbNumDisks: string;
    comment: string;
    configValid: string;
    configState: string;
    csrfToken: string;
    defaultFormat: string;
    defaultFsType: FsType;
    deviceCount: string;
    domain: string;
    domainLogin: string;
    domainShort: string;
    flashGuid: string;
    flashProduct: string;
    flashVendor: string;
    fsCopyPrcnt: string;
    fsNumMounted: string;
    fsNumUnmountable: string;
    fsProgress: string;
    fsState: string;
    fsUnmountableMask: string;
    fuseDirectio: string;
    fuseDirectioDefault: string;
    fuseDirectioStatus: string;
    fuseRemember: string;
    fuseRememberDefault: string;
    fuseRememberStatus: string;
    hideDotFiles: string;
    localMaster: string;
    localTld: string;
    luksKeyfile: string;
    maxArraysz: string;
    maxCachesz: string;
    mdColor: string;
    mdNumDisabled: string;
    mdNumDisks: string;
    mdNumErased: string;
    mdNumInvalid: string;
    mdNumMissing: string;
    mdNumNew: string;
    mdNumStripes: string;
    mdNumStripesDefault: string;
    mdNumStripesStatus: string;
    mdResync: string;
    mdResyncAction: string;
    mdResyncCorr: string;
    mdResyncDb: string;
    mdResyncDt: string;
    mdResyncPos: string;
    mdResyncSize: string;
    mdState: string;
    mdSyncThresh: string;
    mdSyncThreshDefault: string;
    mdSyncThreshStatus: string;
    mdSyncWindow: string;
    mdSyncWindowDefault: string;
    mdSyncWindowStatus: string;
    mdVersion: string;
    mdWriteMethod: string;
    mdWriteMethodDefault: string;
    mdWriteMethodStatus: string;
    name: string;
    nrRequests: string;
    nrRequestsDefault: string;
    ntpServer1: string;
    ntpServer2: string;
    ntpServer3: string;
    ntpServer4: string;
    pollAttributes: string;
    pollAttributesDefault: string;
    pollAttributesStatus: string;
    port: string;
    portssh: string;
    portssl: string;
    porttelnet: string;
    queueDepth: string;
    regCheck: RegistrationCheck;
    regFile: string;
    regGen: string;
    regGuid: string;
    regTm: string;
    regTm2: string;
    regTo: string;
    regTy: RegistrationType;
    regState: RegistrationState;
    safeMode: string;
    sbClean: string;
    sbEvents: string;
    sbName: string;
    sbNumDisks: string;
    sbState: string;
    sbSynced: string;
    sbSynced2: string;
    sbSyncErrs: string;
    sbSyncExit: string;
    sbUpdated: string;
    sbVersion: string;
    security: string;
    shareAvahiEnabled: string;
    shareAvahiSmbModel: string;
    shareAvahiSmbName: string;
    shareCacheEnabled: string;
    shareCacheFloor: string;
    shareCount: string;
    shareDisk: string;
    shareInitialGroup: string;
    shareInitialOwner: string;
    shareMoverActive: string;
    shareMoverLogging: string;
    shareMoverSchedule: string;
    shareNfsCount: string;
    shareNfsEnabled: string;
    shareSmbCount: string;
    shareSmbEnabled: string;
    shareUser: string;
    shareUserExclude: string;
    shutdownTimeout: string;
    spindownDelay: string;
    spinupGroups: string;
    startArray: string;
    startMode: string;
    startPage: string;
    sysArraySlots: string;
    sysCacheSlots: string;
    sysFlashSlots: string;
    sysModel: string;
    timeZone: string;
    useNtp: IniStringBoolean;
    useSsh: IniStringBoolean;
    useSsl: IniStringBooleanOrAuto;
    useTelnet: string;
    version: string;
    workgroup: string;
    useUpnp: IniStringBoolean;
};

const iniBooleanToJsBoolean = (value: string, defaultValue?: boolean) => {
    if (value === 'no' || value === 'false') {
        return false;
    }

    if (value === 'yes' || value === 'true') {
        return true;
    }

    if (defaultValue !== undefined) {
        return defaultValue;
    }

    throw new Error(`Value "${value}" is not false/true or no/yes.`);
};

const iniBooleanOrAutoToJsBoolean = (value: IniStringBooleanOrAuto) => {
    try {
        // Either it'll return true/false or throw
        return iniBooleanToJsBoolean((value as IniStringBoolean));
    } catch {
        // Auto or null
        if (value === 'auto') {
            return null;
        }
    }

    throw new Error(`Value "${value as string}" is not auto/no/yes.`);
};

const safeParseMdState = (mdState: string | undefined): ArrayState => {
    if (!mdState || typeof mdState !== 'string') {
        return ArrayState.STOPPED;
    }
    const stateUpper = mdState.toUpperCase()
    const attemptedParse =
        ArrayState[
            stateUpper.startsWith('ERROR')
                ? stateUpper.split(':')[1]
                : stateUpper
        ];

    if (!attemptedParse) {
        return ArrayState.STOPPED
    }
    return attemptedParse;
}

export const parse: StateFileToIniParserMap['var'] = iniFile => {
    return {
        ...iniFile,
        mdState: safeParseMdState(iniFile.mdState),
        bindMgt: iniBooleanOrAutoToJsBoolean(iniFile.bindMgt),
        cacheNumDevices: toNumber(iniFile.cacheNumDevices),
        cacheSbNumDisks: toNumber(iniFile.cacheSbNumDisks),
        configValid: iniBooleanToJsBoolean(iniFile.configValid, false),
        configState: iniFile.configValid,
        deviceCount: toNumber(iniFile.deviceCount),
        fsCopyPrcnt: toNumber(iniFile.fsCopyPrcnt),
        fsNumMounted: toNumber(iniFile.fsNumMounted),
        fsNumUnmountable: toNumber(iniFile.fsNumUnmountable),
        hideDotFiles: iniBooleanToJsBoolean(iniFile.hideDotFiles),
        localMaster: iniBooleanToJsBoolean(iniFile.localMaster),
        maxArraysz: toNumber(iniFile.maxArraysz),
        maxCachesz: toNumber(iniFile.maxCachesz),
        mdNumDisabled: toNumber(iniFile.mdNumDisabled),
        mdNumDisks: toNumber(iniFile.mdNumDisks),
        mdNumErased: toNumber(iniFile.mdNumErased),
        mdNumInvalid: toNumber(iniFile.mdNumInvalid),
        mdNumMissing: toNumber(iniFile.mdNumMissing),
        mdNumNew: toNumber(iniFile.mdNumNew),
        mdNumStripes: toNumber(iniFile.mdNumStripes),
        mdNumStripesDefault: toNumber(iniFile.mdNumStripesDefault),
        mdResync: toNumber(iniFile.mdResync),
        mdResyncPos: toNumber(iniFile.mdResyncPos),
        mdResyncSize: toNumber(iniFile.mdResyncSize),
        mdSyncThresh: toNumber(iniFile.mdSyncThresh),
        mdSyncThreshDefault: toNumber(iniFile.mdSyncThreshDefault),
        mdSyncWindow: toNumber(iniFile.mdSyncWindow),
        mdSyncWindowDefault: toNumber(iniFile.mdSyncWindowDefault),
        mdWriteMethod: toNumber(iniFile.mdWriteMethod),
        nrRequests: toNumber(iniFile.nrRequests),
        nrRequestsDefault: toNumber(iniFile.nrRequestsDefault),
        port: toNumber(iniFile.port),
        portssh: toNumber(iniFile.portssh),
        portssl: toNumber(iniFile.portssl),
        porttelnet: toNumber(iniFile.porttelnet),
        regCheck: iniFile.regCheck === '' ? 'Valid' : 'Error',
        regTy: (['Basic', 'Plus', 'Pro', 'Trial'].includes(iniFile.regTy)
            ? iniFile.regTy
            : 'Invalid'
        ).toUpperCase() as registrationType,
        // Make sure to use a || not a ?? as regCheck can be an empty string
        regState: (iniFile.regCheck || iniFile.regTy || '').toUpperCase() ?? RegistrationState.EGUID,
        safeMode: iniBooleanToJsBoolean(iniFile.safeMode),
        sbClean: iniBooleanToJsBoolean(iniFile.sbClean),
        sbEvents: toNumber(iniFile.sbEvents),
        sbNumDisks: toNumber(iniFile.sbNumDisks),
        sbSynced: toNumber(iniFile.sbSynced),
        sbSynced2: toNumber(iniFile.sbSynced2),
        sbSyncErrs: toNumber(iniFile.sbSyncErrs),
        shareAvahiEnabled: iniBooleanToJsBoolean(iniFile.shareAvahiEnabled),
        shareCacheEnabled: iniBooleanToJsBoolean(iniFile.shareCacheEnabled),
        shareCount: toNumber(iniFile.shareCount),
        shareMoverActive: iniBooleanToJsBoolean(iniFile.shareMoverActive),
        shareMoverLogging: iniBooleanToJsBoolean(iniFile.shareMoverLogging),
        shareNfsCount: toNumber(iniFile.shareNfsCount),
        shareNfsEnabled: iniBooleanToJsBoolean(iniFile.shareNfsEnabled),
        shareSmbCount: toNumber(iniFile.shareSmbCount),
        shareSmbEnabled: ['yes', 'ads'].includes(iniFile.shareSmbEnabled),
        shareSmbMode:
            iniFile.shareSmbEnabled === 'ads'
                ? 'active-directory'
                : 'workgroup',
        shutdownTimeout: toNumber(iniFile.shutdownTimeout),
        spindownDelay: toNumber(iniFile.spindownDelay),
        spinupGroups: iniBooleanToJsBoolean(iniFile.spinupGroups),
        startArray: iniBooleanToJsBoolean(iniFile.startArray),
        sysArraySlots: toNumber(iniFile.sysArraySlots),
        sysCacheSlots: toNumber(iniFile.sysCacheSlots),
        sysFlashSlots: toNumber(iniFile.sysFlashSlots),
        useNtp: iniBooleanToJsBoolean(iniFile.useNtp),
        useSsh: iniBooleanToJsBoolean(iniFile.useSsh),
        useSsl: iniBooleanOrAutoToJsBoolean(iniFile.useSsl),
        useTelnet: iniBooleanToJsBoolean(iniFile.useTelnet),
        useUpnp: iniBooleanToJsBoolean(iniFile.useUpnp),
    };
};
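Editor's note: the two INI boolean helpers above define how emhttp's 'yes'/'no'/'auto' strings are converted. A small standalone sketch of the expected mapping follows; it is a reimplementation for illustration, not the module's exports.

// Sketch only: mirrors the conversion rules implemented above.
type IniBool = 'yes' | 'no' | 'true' | 'false';
type IniBoolOrAuto = IniBool | 'auto';

const toBool = (value: IniBool): boolean => value === 'yes' || value === 'true';
const toBoolOrNull = (value: IniBoolOrAuto): boolean | null =>
    value === 'auto' ? null : toBool(value);

console.log(toBool('yes'));        // true
console.log(toBool('no'));         // false
console.log(toBoolOrNull('auto')); // null, e.g. useSsl can be 'auto'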
83
api/src/store/watch/state-watch.ts
Normal file
@@ -0,0 +1,83 @@
import { emhttpLogger } from '@app/core/log';

import { watch, type FSWatcher, type WatchOptions } from 'chokidar';
import { getters, store } from '@app/store';
import { StateFileKey } from '@app/store/types';
import { parse, join } from 'path';
import { loadSingleStateFile } from '@app/store/modules/emhttp';
import { CHOKIDAR_USEPOLLING } from '@app/environment';

// Configure any excluded nchan channels that we support here
const excludedWatches: StateFileKey[] = [StateFileKey.devs];

const chokidarOptionsForStateKey = (key: StateFileKey): WatchOptions => {
    if ([StateFileKey.disks, StateFileKey.shares].includes(key)) {
        return {
            usePolling: true,
            interval: 10_000,
        }
    }
    return { usePolling: CHOKIDAR_USEPOLLING }
}

export class StateManager {
    public static instance: StateManager | null = null;
    private readonly fileWatchers: FSWatcher[] = [];

    private constructor() {
        this.setupChokidarWatchForState();
    }

    public static getInstance(): StateManager {
        if (!StateManager.instance) {
            StateManager.instance = new StateManager();
        }

        return StateManager.instance;
    }

    private getStateFileKeyFromPath(path: string): StateFileKey | undefined {
        const parsed = parse(path);
        return StateFileKey[parsed.name];
    }

    private readonly setupChokidarWatchForState = () => {
        const { states } = getters.paths();
        for (const key of Object.values(StateFileKey)) {
            if (!excludedWatches.includes(key)) {
                const pathToWatch = join(states, `${key}.ini`);
                emhttpLogger.debug(
                    'Setting up watch for path: %s',
                    pathToWatch
                );
                const stateWatch = watch(pathToWatch, chokidarOptionsForStateKey(key));
                stateWatch.on('change', async (path) => {
                    const stateFile = this.getStateFileKeyFromPath(path);
                    if (stateFile) {
                        try {
                            emhttpLogger.debug(
                                'Loading state file for %s',
                                stateFile
                            );
                            await store.dispatch(
                                loadSingleStateFile(stateFile)
                            );
                        } catch (error: unknown) {
                            emhttpLogger.error(
                                'Failed to load state file: [%s]\nerror: %o',
                                stateFile,
                                error
                            );
                        }
                    } else {
                        emhttpLogger.trace(
                            'Failed to resolve a stateFileKey from path: %s',
                            path
                        );
                    }
                });
                this.fileWatchers.push(stateWatch);
            }
        }
    };
}
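Editor's note: the watcher options above switch the disks and shares state files to 10-second polling rather than relying on filesystem events. A minimal chokidar sketch of the same pattern follows; the watched path is illustrative and not taken from this commit.

import { watch } from 'chokidar';

// Sketch only: poll a frequently rewritten ini file on a fixed interval,
// mirroring the options chokidarOptionsForStateKey returns for disks/shares.
const watcher = watch('/var/local/emhttp/disks.ini', {
    usePolling: true,
    interval: 10_000, // check every 10 seconds
});

watcher.on('change', (path) => {
    console.log('state file changed:', path);
});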
79
api/src/upnp/helpers.ts
Normal file
@@ -0,0 +1,79 @@
import { ONE_HOUR_SECS, THIRTY_SECONDS_MS } from '@app/consts';
import { upnpLogger } from '@app/core/log';
import { IS_DOCKER } from '@app/environment';
import { convertToFuzzyTime } from '@app/mothership/utils/convert-to-fuzzy-time';
import { getters } from '@app/store';
import { type LeaseRenewalArgs } from '@app/store/modules/upnp';
import { MockUpnpClient } from '@app/upnp/mock-upnp-client';
import { Client, type Mapping } from '@runonflux/nat-upnp';

// If we're in docker mode, load the mock client
const upnpClient = IS_DOCKER ? new MockUpnpClient({ timeout: THIRTY_SECONDS_MS }) : new Client({
    timeout: THIRTY_SECONDS_MS,
});

const PORT_RANGE_MIN = 35_000;
const PORT_RANGE_MAX = 65_000;

export const getWanPortForUpnp = (mappings: Mapping[] | null, minPort = PORT_RANGE_MIN, maxPort = PORT_RANGE_MAX): number | null => {
    const excludedPorts = mappings?.map(val => val.public.port);
    // Attempt to get a port 50 times, then fail
    for (let i = 0; i < 50; i += 1) {
        const port = convertToFuzzyTime(minPort, maxPort);
        if (!excludedPorts?.includes(port)) {
            return port;
        }
    }

    return null;
};

/**
 * @param param0 { localPortForUpnp, wanPortForUpnp }
 * @returns void
 * @throws Error, if renewal fails
 */
export const renewUpnpLease = async ({ localPortForUpnp, wanPortForUpnp, serverName }: { localPortForUpnp: number; wanPortForUpnp: number; serverName?: string }): Promise<void> => {
    upnpLogger.info('Renewing UPNP Lease: Public Port [%s] Local Port [%s]', wanPortForUpnp, localPortForUpnp);
    const result = await upnpClient.createMapping({
        public: wanPortForUpnp,
        private: localPortForUpnp,
        description: `Unraid Remote Access - ${serverName ?? 'No Server Name Found'}`,
        ttl: ONE_HOUR_SECS,
    });
    upnpLogger.trace('Opening Port Result %o', result);
};

/**
 * Get the upnp mappings that are already configured for the router
 * @returns Array of already mapped ports, null if failure occurs
 */
export const getUpnpMappings = async (): Promise<Mapping[] | null> => {
    upnpLogger.trace('Fetching UPNP Mappings');

    try {
        const mappings = await upnpClient.getMappings();

        return mappings;
    } catch (error: unknown) {
        upnpLogger.warn(`Caught error [${error instanceof Error ? error.message : 'N/A'}] fetching current UPNP mappings`);
    }

    return null;
};

/**
 * Remove a UPNP lease
 * @param param0 { localPortForUpnp, wanPortForUpnp }
 * @throws Error if the removal fails
 */
export const removeUpnpLease = async ({ localPortForUpnp, wanPortForUpnp } = getters.upnp() as LeaseRenewalArgs): Promise<void> => {
    upnpLogger.warn('Removing UPNP Lease: Public Port [%s] Local Port [%s]', wanPortForUpnp, localPortForUpnp);

    const result = await upnpClient.removeMapping({
        public: wanPortForUpnp,
        private: localPortForUpnp,
    });

    upnpLogger.trace('UPNP Removal Result %o', result);
};
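Editor's note: getWanPortForUpnp reuses convertToFuzzyTime purely as a random-integer-in-range helper to pick a WAN port that is not already mapped. A minimal self-contained sketch of that selection loop follows; the helper names are illustrative, not the module's exports.

// Sketch only: pick a random WAN port in [min, max] that is not already mapped,
// giving up after 50 attempts, the same strategy as getWanPortForUpnp above.
const randomInt = (min: number, max: number): number =>
    Math.floor(Math.random() * (max - min + 1)) + min;

const pickFreePort = (usedPorts: number[], min = 35_000, max = 65_000): number | null => {
    for (let attempt = 0; attempt < 50; attempt += 1) {
        const candidate = randomInt(min, max);
        if (!usedPorts.includes(candidate)) return candidate;
    }
    return null;
};

console.log(pickFreePort([35_001, 40_000])); // e.g. 52314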