Mirror of https://github.com/unraid/api.git (synced 2026-01-04 23:50:37 -06:00)

chore: lint all the files
@@ -5,4 +5,9 @@ module.exports = {
'@unraid',
"plugin:unicorn/recommended"
],
rules: {
"unicorn/prefer-node-protocol": "error",
"unicorn/no-null": "off",
"unicorn/prevent-abbreviations": "off"
}
};
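For context, a minimal sketch of what the newly enabled `unicorn/prefer-node-protocol` rule enforces (my own two-line example; the same rewrite appears throughout the diff below):

```ts
// Flagged by unicorn/prefer-node-protocol:
import path from 'path';
// Accepted, and what the auto-fix produces:
import fs from 'node:fs';
```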
@@ -105,7 +105,7 @@ NCHAN=disable \ # Disable nchan polling
PATHS_DYNAMIX_CONFIG=$(pwd)/dev/dynamix/dynamix.cfg \ # Dynamix's config file
PATHS_MY_SERVERS_CONFIG=$(pwd)/dev/unraid.net/myservers.cfg \ # My servers config file
PORT=8500 \ # What port unraid-api should start on (e.g. /var/run/unraid-api.sock or 8000)
node dist/cli.cjs --debug # Enable debug logging
node dist/cli.js --debug # Enable debug logging
```

## Release
104 app/cli.ts
@@ -1,6 +1,6 @@
import fs from 'fs';
import path from 'path';
import { spawn, exec } from 'child_process';
import fs from 'node:fs';
import path from 'node:path';
import { spawn, exec } from 'node:child_process';
import { parse, ArgsParseOptions, ArgumentConfig } from 'ts-command-line-args';
import dotEnv from 'dotenv';
import findProcess from 'find-process';
@@ -10,14 +10,13 @@ import dedent from 'dedent-tabs';
import { version } from '../package.json';
import { paths } from './core/paths';
import { logger } from './core/log';
import packageJson from '../package.json';

const setEnv = (envName: string, value: any) => {
const setEnvironment = (key: string, value: any) => {
if (!value || String(value).trim().length === 0) {
return;
}

process.env[envName] = String(value);
process.env[key] = String(value);
};

interface Flags {
@@ -31,18 +30,22 @@ interface Flags {
version?: boolean;
}

const args: ArgumentConfig<Flags> = {
const arguments_: ArgumentConfig<Flags> = {
command: { type: String, defaultOption: true, optional: true },
help: { type: Boolean, optional: true, alias: 'h', description: 'Prints this usage guide.' },
debug: { type: Boolean, optional: true, alias: 'd', description: 'Enabled debug mode.' },
port: { type: String, optional: true, alias: 'p', description: 'Set the graphql port.' },
environment: { type: String, typeLabel: '{underline production/staging/development}', optional: true, description: 'Set the working environment.' },
'log-level': { type: (level?: string) => {
return ['error', 'warn', 'info', 'debug', 'trace', 'silly'].includes(level ?? '') ? level : undefined;
}, typeLabel: '{underline error/warn/info/debug/trace/silly}', optional: true, description: 'Set the log level.' },
'log-transport': { type: (transport?: string) => {
return ['console', 'syslog'].includes(transport ?? 'console') ? transport : 'console';
}, typeLabel: '{underline console/syslog}', optional: true, description: 'Set the log transport. (default=syslog)' },
'log-level': {
type: (level?: string) => {
return ['error', 'warn', 'info', 'debug', 'trace', 'silly'].includes(level ?? '') ? level : undefined;
}, typeLabel: '{underline error/warn/info/debug/trace/silly}', optional: true, description: 'Set the log level.'
},
'log-transport': {
type: (transport?: string) => {
return ['console', 'syslog'].includes(transport ?? 'console') ? transport : 'console';
}, typeLabel: '{underline console/syslog}', optional: true, description: 'Set the log transport. (default=syslog)'
},
version: { type: Boolean, optional: true, alias: 'v', description: 'Show version.' }
};
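To make the reshaped `log-level` option above a little more concrete, this is how its custom `type` function behaves when used as a validator (a sketch with my own sample inputs):

```ts
const toLogLevel = (level?: string) =>
	['error', 'warn', 'info', 'debug', 'trace', 'silly'].includes(level ?? '') ? level : undefined;

toLogLevel('trace');   // 'trace' — accepted as-is
toLogLevel('verbose'); // undefined — falls back to the debug/info default applied later via LOG_LEVEL
```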
@@ -61,7 +64,7 @@ const options: ArgsParseOptions<Flags> = {
footerContentSections: [{ header: '', content: 'Copyright © 2021 Lime Technology, Inc.' }]
};

const mainOptions = parse<Flags>(args, { ...options, partial: true, stopAtFirstUnknown: true });
const mainOptions = parse<Flags>(arguments_, { ...options, partial: true, stopAtFirstUnknown: true });
const commandOptions = (mainOptions as Flags & { _unknown: string[] })._unknown || [];
const command: string = (mainOptions as any).command;
// Use the env passed by the user, then the flag inline, then default to production
@@ -92,18 +95,18 @@ const commands = {
process.chdir(paths.get('unraid-api-base')!);

// Set envs
setEnv('DEBUG', mainOptions.debug);
setEnv('ENVIRONMENT', getEnvironment());
setEnv('LOG_LEVEL', mainOptions['log-level'] ?? (mainOptions.debug ? 'debug' : 'info'));
setEnv('LOG_TRANSPORT', mainOptions['log-transport']);
setEnv('PORT', mainOptions.port);
setEnvironment('DEBUG', mainOptions.debug);
setEnvironment('ENVIRONMENT', getEnvironment());
setEnvironment('LOG_LEVEL', mainOptions['log-level'] ?? (mainOptions.debug ? 'debug' : 'info'));
setEnvironment('LOG_TRANSPORT', mainOptions['log-transport']);
setEnvironment('PORT', mainOptions.port);

console.info(`Starting unraid-api v${packageJson.version as string}`);
console.info(`Starting unraid-api v${version}`);
console.info(`Connecting to the "${getEnvironment()}" environment.`);

// Load bundled index file
const indexPath = './index.js';
require(indexPath);
await import(indexPath);

if (!mainOptions.debug) {
if ('_DAEMONIZE_PROCESS' in process.env) {
@@ -129,6 +132,7 @@ const commands = {
console.log('Daemonized successfully!');

// Exit cleanly
// eslint-disable-next-line unicorn/no-process-exit
process.exit(0);
}
}
@@ -161,7 +165,7 @@ const commands = {
* Print API version.
*/
async version() {
console.log(`Unraid API v${version as string}`);
console.log(`Unraid API v${version}`);
},
async status() {
// Find all processes called "unraid-api" which aren't this process
@@ -190,53 +194,43 @@ const commands = {
},
async 'switch-env'() {
const basePath = paths.get('unraid-api-base')!;
const envFlashFilePath = paths.get('myservers-env')!;
const envFile = await fs.promises.readFile(envFlashFilePath, 'utf-8').catch(() => '');
const environmentFlashFilePath = paths.get('myservers-env')!;
const environmentFile = await fs.promises.readFile(environmentFlashFilePath, 'utf-8').catch(() => '');

logger.debug('Checking %s for current ENV, found %s', envFlashFilePath, envFile);
logger.debug('Checking %s for current ENV, found %s', environmentFlashFilePath, environmentFile);

// Match the env file env="production" which would be [0] = env="production", [1] = env and [2] = production
const matchArray = /([a-zA-Z]+)=["]*([a-zA-Z]+)["]*/.exec(envFile);
const matchArray = /([A-Za-z]+)="*([A-Za-z]+)"*/.exec(environmentFile);
// Get item from index 2 of the regex match or return undefined
const [,,currentEnvInFile] = matchArray && matchArray.length === 3 ? matchArray : [];
const [_, __, currentEnvironmentInFile] = matchArray && matchArray.length === 3 ? matchArray : [];

let newEnv = 'production';

// Switch from staging to production
if (currentEnvInFile === 'staging') {
newEnv = 'production';
}

// Switch from production to staging
if (currentEnvInFile === 'production') {
newEnv = 'staging';
}

if (currentEnvInFile) {
console.info('Switching from "%s" to "%s"...', currentEnvInFile, newEnv);
const newEnvironment = currentEnvironmentInFile === 'production' ? 'staging' : 'production';
if (currentEnvironmentInFile) {
console.info('Switching from "%s" to "%s"...', currentEnvironmentInFile, newEnvironment);
} else {
console.info('No ENV found, setting env to "production"...');
}

// Write new env to flash
const newEnvLine = `env="${newEnv}"`;
await fs.promises.writeFile(envFlashFilePath, newEnvLine);
logger.debug('Writing %s to %s', newEnvLine, envFlashFilePath);
const newEnvironmentLine = `env="${newEnvironment}"`;
await fs.promises.writeFile(environmentFlashFilePath, newEnvironmentLine);
logger.debug('Writing %s to %s', newEnvironmentLine, environmentFlashFilePath);

// Copy the new env over to live location before restarting
const source = path.join(basePath, `.env.${newEnv}`);
const source = path.join(basePath, `.env.${newEnvironment}`);
const destination = path.join(basePath, '.env');
logger.debug('Copying %s to %s', source, destination);
await new Promise<void>((resolve, reject) => {
// Use the native cp command to ensure we're outside the virtual file system
exec(`cp "${source}" "${destination}"`, error => {
if (error) {
return reject(error);
}
exec(`cp "${source}" "${destination}"`, error => {
if (error) {
reject(error);
return;
}

resolve();
});
});
resolve();
});
});

// If there's a process running restart it
const unraidApiPid = await getUnraidApiPid();
@@ -244,7 +238,8 @@ const commands = {
console.info('unraid-api is running, restarting...');

// Restart the process
return this.restart();
await this.restart();
return;
}

console.info('Run "unraid-api start" to start the API.');
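As an illustration of the `switch-env` logic above, this is what the env-file regex extracts for a flash file containing `env="staging"` (the sample input is mine):

```ts
const matchArray = /([A-Za-z]+)="*([A-Za-z]+)"*/.exec('env="staging"');
// matchArray?.[1] === 'env', matchArray?.[2] === 'staging'
// so currentEnvironmentInFile is 'staging' and the ternary writes env="production" back to flash
```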
@@ -255,11 +250,12 @@ async function main() {
if (!command) {
if (mainOptions.version) {
await commands.version();
// eslint-disable-next-line unicorn/no-process-exit
process.exit();
}

// Run help command
parse<Flags>(args, { ...options, partial: true, stopAtFirstUnknown: true, argv: ['-h'] });
parse<Flags>(arguments_, { ...options, partial: true, stopAtFirstUnknown: true, argv: ['-h'] });
}

// Unknown command

@@ -2,7 +2,7 @@ import { config } from './core/config';

const internalWsAddress = () => {
const port = config.get('port') as number | string;
return isNaN(port as any) ?
return Number.isNaN(port as any) ?
// Unix Socket
`ws+unix:${port}` :
// Numbered port
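A small aside on the `isNaN` to `Number.isNaN` swap above: the two behave differently on strings, which matters here because `port` can be a unix socket path. A sketch with my own values, purely to show the API semantics:

```ts
isNaN('unraid-api.sock' as any);          // true  — the string coerces to NaN
Number.isNaN('unraid-api.sock' as any);   // false — it is not the literal NaN value
Number.isNaN(Number('unraid-api.sock'));  // true  — coercing first reproduces the old check
```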
@@ -156,7 +156,6 @@ export class ApiManager extends EventEmitter {
replace(name: string, key: string, options: KeyOptions) {
// Delete existing key
// @ts-expect-error
// eslint-disable-next-line unicorn/no-null
this.keys.items[name] = null;

// Add new key

@@ -7,22 +7,19 @@ import { discoveryLogger as log } from '../log';
export const listen = async () => {
stw
.on('up', service => {
if (service.type === 'unraid') {
if (service.txt?.is_setup === 'false') {
const ipv4 = service.addresses.find(address => address.includes('.'));
const ipv6 = service.addresses.find(address => address.includes(':'));
const ipAddress = ipv4 ?? ipv6;
// No ip?
if (!ipAddress) {
return;
}

log.info(`Found a new local server [${ipAddress}], visit your my servers dashboard to claim.`);
if (service.type === 'unraid' && service.txt?.is_setup === 'false') {
const ipv4 = service.addresses.find(address => address.includes('.'));
const ipv6 = service.addresses.find(address => address.includes(':'));
const ipAddress = ipv4 ?? ipv6;
// No ip?
if (!ipAddress) {
return;
}

log.info(`Found a new local server [${ipAddress}], visit your my servers dashboard to claim.`);
}
// Console.log(`${service.name} is up! (from ${referrer.address}`);
})
.on('down', (remoteService, _res, referrer) => {
.on('down', (remoteService, _response, referrer) => {
log.debug(`${remoteService.name} is down! (from ${referrer.address})`);
});

@@ -3,13 +3,13 @@
* Written by: Alexis Tyler
*/

import { format } from 'util';
import { format } from 'node:util';
import { AppError } from './app-error';

/**
* Invalid param provided to module
*/
export class ParamInvalidError extends AppError {
export class ParameterInvalidError extends AppError {
constructor(parameterName: string, parameter: any) {
// Overriding both message and status code.
super(format('Param invalid: %s = %s', parameterName, parameter), 500);

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import path from 'path';
import path from 'node:path';
import packageJson from 'package-json';
import dlTgz from 'dl-tgz';
import observableToPromise from 'observable-to-promise';
@@ -35,19 +35,19 @@ export const addPlugin = async (context: Context): Promise<CoreResult> => {

// Validation
const missingFields = hasFields(context.data, ['name']);
if (missingFields.length !== 0) {
if (missingFields.length > 0) {
// Log first error.
throw new FieldMissingError(missingFields[0]);
}

// Get package metadata
const { name, version } = context.data;
const pkg = await packageJson(name, {
const package_ = await packageJson(name, {
allVersions: Boolean(version)
});

// Plugin tgz url
const latest = pkg.versions[version];
const latest = package_.versions[version];
const url = latest.dist.tarball;
const pluginCwd = paths.get('plugins')!;

@@ -63,7 +63,7 @@ export const addPlugin = async (context: Context): Promise<CoreResult> => {
return {
text: 'Plugin added successfully.',
json: {
pkg
pkg: package_
}
};
};

@@ -37,7 +37,7 @@ export const addUser = async (context: Context): Promise<CoreResult> => {
const { name, description = '', password } = data;
const missingFields = hasFields(data, ['name', 'password']);

if (missingFields.length !== 0) {
if (missingFields.length > 0) {
// Only log first error.
throw new FieldMissingError(missingFields[0]);
}

@@ -22,7 +22,7 @@ export const addDiskToArray = async function (context: CoreContext): Promise<Cor
});

const missingFields = hasFields(data, ['id']);
if (missingFields.length !== 0) {
if (missingFields.length > 0) {
// Just log first error
throw new FieldMissingError(missingFields[0]);
}

@@ -31,7 +31,7 @@ export const removeDiskFromArray = async (context: Context): Promise<CoreResult>

const missingFields = hasFields(data, ['id']);

if (missingFields.length !== 0) {
if (missingFields.length > 0) {
// Only log first error
throw new FieldMissingError(missingFields[0]);
}

@@ -5,7 +5,7 @@

import { CoreContext, CoreResult } from '../../types';
import { hasFields, ensurePermission, emcmd, arrayIsRunning, uppercaseFirstChar } from '../../utils';
import { AppError, FieldMissingError, ParamInvalidError } from '../../errors';
import { AppError, FieldMissingError, ParameterInvalidError } from '../../errors';
import { getArray } from '..';

// @TODO: Fix this not working across node apps
@@ -25,7 +25,7 @@ export const updateArray = async (context: CoreContext): Promise<CoreResult> =>

const missingFields = hasFields(data, ['state']);

if (missingFields.length !== 0) {
if (missingFields.length > 0) {
// Only log first error
throw new FieldMissingError(missingFields[0]);
}
@@ -35,7 +35,7 @@ export const updateArray = async (context: CoreContext): Promise<CoreResult> =>
const pendingState = nextState === 'stop' ? 'stopping' : 'starting';

if (!['start', 'stop'].includes(nextState)) {
throw new ParamInvalidError('state', nextState);
throw new ParameterInvalidError('state', nextState);
}

// Prevent this running multiple times at once

@@ -4,7 +4,7 @@
*/

import { CoreContext, CoreResult } from '../../types';
import { FieldMissingError, ParamInvalidError } from '../../errors';
import { FieldMissingError, ParameterInvalidError } from '../../errors';
import { emcmd, ensurePermission } from '../../utils';
import { varState } from '../../states';

@@ -36,7 +36,7 @@ export const updateParityCheck = async (context: Context): Promise<CoreResult> =
throw new FieldMissingError('state');
}

const { state: wantedState } = data;
const { state: wantedState, correct } = data;
const running = varState?.data?.mdResync !== 0;
const states = {
pause: {
@@ -62,11 +62,11 @@ export const updateParityCheck = async (context: Context): Promise<CoreResult> =

// Only allow states from states object
if (!allowedStates.includes(wantedState)) {
throw new ParamInvalidError('state', wantedState);
throw new ParameterInvalidError('state', wantedState);
}

// Should we write correction to the parity during the check
const writeCorrectionsToParity = wantedState === 'start' && data.correct;
const writeCorrectionsToParity = wantedState === 'start' && correct;

await emcmd({
startState: 'STARTED',

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import fs from 'fs';
import fs from 'node:fs';
import camelCaseKeys from 'camelcase-keys';
import { paths } from '../../paths';
import { docker, catchHandlers, ensurePermission } from '../../utils';

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import { promises as fs } from 'fs';
import { promises as fs } from 'node:fs';
import { CoreResult, CoreContext } from '../types';
import { paths } from '../paths';
import { FileMissingError } from '../errors';
@@ -47,7 +47,7 @@ export const getParityHistory = async (context: CoreContext): Promise<CoreResult
head: ['Date', 'Duration', 'Speed', 'Status', 'Errors']
});
// Update raw values with strings
parityChecks.forEach(check => {
for (const check of parityChecks) {
const array = Object.values({
...check,
speed: check.speed ? check.speed : 'Unavailable',
@@ -55,7 +55,7 @@ export const getParityHistory = async (context: CoreContext): Promise<CoreResult
status: check.status === '-4' ? 'Cancelled' : 'OK'
});
table.push(array);
});
}

return {
text: table.toString(),

@@ -31,18 +31,16 @@ export const getPermissions = async function (context: CoreContext): Promise<Cor
}));

// Get all roles and their scopes
const grants = Object.entries(ac.getGrants())
const grants = Object.fromEntries(Object.entries(ac.getGrants())
.map(([name, grant]) => {
// @ts-expect-error
const { $extend, ...grants } = grant;
return [name, grants];
})
.reduce((object, {
.map(({
0: key,
1: value
}) => Object.assign(object, {
[key.toString()]: value
}), {});
}) => [key.toString(), value]));

return {
text: `Scopes: ${JSON.stringify(scopes, null, 2)}`,
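The `Object.fromEntries` rewrite above replaces the manual `reduce`/`Object.assign` accumulator; in isolation the pattern looks like this (toy grant data, not from the repo):

```ts
const grants = Object.fromEntries(
	Object.entries({ admin: { $extend: [], scopes: { 'read:any': ['*'] } } })
		// Drop the $extend key and keep the rest, exactly as the diff does
		.map(([name, { $extend, ...rest }]) => [name, rest])
);
// → { admin: { scopes: { 'read:any': ['*'] } } }
```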
@@ -4,7 +4,7 @@
*/

import { CoreContext, CoreResult } from '../types';
import { ParamInvalidError } from '../errors';
import { ParameterInvalidError } from '../errors';
import { Plugin, pluginManager } from '../plugin-manager';
import { ensurePermission } from '../utils';

@@ -32,7 +32,7 @@ export const getPlugins = (context: Readonly<Context>): Result => {
const { filter = 'all' } = query;

if (!['all', 'active', 'inactive'].includes(filter)) {
throw new ParamInvalidError('filter', filter);
throw new ParameterInvalidError('filter', filter);
}

const plugins = pluginManager.getAllPlugins().map(plugin => {

@@ -3,10 +3,9 @@
* Written by: Alexis Tyler
*/

import { getEmhttpdService, getUnraidApiService } from './services';
import { getEmhttpService, getUnraidApiService } from './services';
import { coreLogger } from '../log';
import { envs } from '../environments';
import { NodeService } from '../utils';
import { environmentVariables } from '../environments';
import { CoreResult, CoreContext } from '../types';

const devNames = [
@@ -18,7 +17,19 @@ const coreNames = [
'unraid-api'
];

interface ServiceResult extends CoreResult {
interface Uptime {
timestamp: string;
seconds?: number;
}

interface NodeService {
name: string;
online?: boolean;
uptime: Uptime;
version?: string;
}

interface ServiceResult extends CoreResult<NodeService> {
json: NodeService;
}

@@ -27,33 +38,36 @@ interface NodeServiceWithName extends NodeService {
}

/**
* Add name to services.
* Add name to results.
*
* @param services
* @param results
* @param names
*/
const addNameToService = (services: ServiceResult[], names: string[]): NodeServiceWithName[] => {
return services.map((service, index) => ({
name: names[index],
...service.json
}));
const addNameToResult = (results: Array<Result | ServiceResult>, names: string[]): NodeServiceWithName[] => {
return results.map((result, index) => {
const { name: _name, ...ResultData } = result.json;
return ({
name: names[index],
...ResultData
});
});
};

interface Result extends CoreResult {
interface Result extends CoreResult<NodeServiceWithName[]> {
json: NodeServiceWithName[];
}

const logErrorAndReturnEmptyArray = (error: Error) => {
coreLogger.error(error);
return [];
};

/**
* Get all services.
*/
export const getServices = async (context: CoreContext): Promise<Result> => {
const logErrorAndReturnEmptyArray = (error: Error) => {
coreLogger.error(error);
return [];
};

const devServices = envs.NODE_ENV === 'development' ? await Promise.all([
getEmhttpdService(context)
const devServices = environmentVariables.NODE_ENV === 'development' ? await Promise.all([
getEmhttpService(context)
]).catch(logErrorAndReturnEmptyArray) : [];

const coreServices = await Promise.all([
@@ -61,8 +75,8 @@ export const getServices = async (context: CoreContext): Promise<Result> => {
]).catch(logErrorAndReturnEmptyArray);

const result = [
...addNameToService(devServices, devNames),
...addNameToService(coreServices, coreNames)
...addNameToResult(devServices, devNames),
...addNameToResult(coreServices, coreNames)
];

return {

@@ -89,7 +89,7 @@ const systemPciDevices = async (): Promise<PciDevice[]> => {
const processedDevices = await filterDevices(filteredDevices).then(async devices => {
return Promise.all(devices
// @ts-expect-error
.map(addDeviceClass)
.map(device => addDeviceClass(device))
.map(async device => {
// Attempt to get the current kernel-bound driver for this pci device
await isSymlink(`${basePath}${device.id}/driver`).then(symlink => {
@@ -136,6 +136,48 @@ const systemAudioDevices = systemPciDevices().then(devices => {
return devices.filter(device => device.class === 'audio' && !device.allowed);
});

const parseUsbDevices = (stdout: string) => stdout.split('\n').map(line => {
const regex = new RegExp(/^.+: ID (?<id>\S+)(?<name>.*)$/);
const result = regex.exec(line);
return (result!.groups as unknown as PciDevice);
}) || [];

// Remove boot drive
const filterBootDrive = (device: Readonly<PciDevice>): boolean => varState?.data?.flashGuid !== device.guid;

// Clean up the name
const sanitizeVendorName = (device: Readonly<PciDevice>) => {
const vendorname = sanitizeVendor(device.vendorname || '');
return {
...device,
vendorname
};
};

const parseDeviceLine = (line?: Readonly<string>): { value: string; string: string } => {
const emptyLine = { value: '', string: '' };

// If the line is blank return nothing
if (!line) {
return emptyLine;
}

// Parse the line
const [, _] = line.split(/[\t ]{2,}/).filter(Boolean);
// eslint-disable-next-line @typescript-eslint/prefer-regexp-exec
const match = _.match(/^(\S+)\s(.*)/)?.slice(1);

// If there's no match return nothing
if (!match) {
return emptyLine;
}

return {
value: match[0],
string: match[1]
};
};

/**
* System usb devices.
* @returns Array of USB devices.
@@ -150,46 +192,10 @@ const getSystemUSBDevices = async (): Promise<any[]> => {
});
}).catch(() => []);

// Remove boot drive
const filterBootDrive = (device: Readonly<PciDevice>): boolean => varState?.data?.flashGuid !== device.guid;

// Remove usb hubs
// @ts-expect-error
const filterUsbHubs = (device: Readonly<PciDevice>): boolean => !usbHubs.includes(device.id);

// Clean up the name
const sanitizeVendorName = (device: Readonly<PciDevice>) => {
const vendorname = sanitizeVendor(device.vendorname || '');
return {
...device,
vendorname
};
};

const parseDeviceLine = (line: Readonly<string>): { value: string; string: string } => {
const emptyLine = { value: '', string: '' };

// If the line is blank return nothing
if (!line) {
return emptyLine;
}

// Parse the line
const [, _] = line.split(/[ \t]{2,}/).filter(Boolean);
// eslint-disable-next-line @typescript-eslint/prefer-regexp-exec
const match = _.match(/^(\S+)\s(.*)/)?.slice(1);

// If there's no match return nothing
if (!match) {
return emptyLine;
}

return {
value: match[0],
string: match[1]
};
};

// Add extra fields to device
const parseDevice = (device: Readonly<PciDevice>) => {
const modifiedDevice: PciDevice = {
@@ -197,11 +203,11 @@ const getSystemUSBDevices = async (): Promise<any[]> => {
};
const info = execa.commandSync(`lsusb -d ${device.id} -v`).stdout.split('\n');
const deviceName = device.name.trim();
const iSerial = parseDeviceLine(info.filter(line => line.includes('iSerial'))[0]);
const iProduct = parseDeviceLine(info.filter(line => line.includes('iProduct'))[0]);
const iManufacturer = parseDeviceLine(info.filter(line => line.includes('iManufacturer'))[0]);
const idProduct = parseDeviceLine(info.filter(line => line.includes('idProduct'))[0]);
const idVendor = parseDeviceLine(info.filter(line => line.includes('idVendor'))[0]);
const iSerial = parseDeviceLine(info.find(line => line.includes('iSerial')));
const iProduct = parseDeviceLine(info.find(line => line.includes('iProduct')));
const iManufacturer = parseDeviceLine(info.find(line => line.includes('iManufacturer')));
const idProduct = parseDeviceLine(info.find(line => line.includes('idProduct')));
const idVendor = parseDeviceLine(info.find(line => line.includes('idVendor')));
const serial = `${iSerial.string.slice(8).slice(0, 4)}-${iSerial.string.slice(8).slice(4)}`;
const guid = `${idVendor.value.slice(2)}-${idProduct.value.slice(2)}-${serial}`;

@@ -226,19 +232,13 @@ const getSystemUSBDevices = async (): Promise<any[]> => {
return modifiedDevice;
};

const parseUsbDevices = (stdout: string) => stdout.split('\n').map(line => {
const regex = new RegExp(/^.+: ID (?<id>\S+)(?<name>.*)$/);
const result = regex.exec(line);
return (result!.groups as unknown as PciDevice);
}) || [];

// Get all usb devices
const usbDevices = await execa('lsusb').then(async ({ stdout }) => {
return parseUsbDevices(stdout)
.map(parseDevice)
.filter(filterBootDrive)
.filter(filterUsbHubs)
.map(sanitizeVendorName);
.map(device => parseDevice(device))
.filter(device => filterBootDrive(device))
.filter(device => filterUsbHubs(device))
.map(device => sanitizeVendorName(device));
});

return usbDevices;
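For reference, this is roughly what `parseDeviceLine` pulls out of a single `lsusb -d <id> -v` line (the sample line is made up):

```ts
parseDeviceLine('  iSerial                 3 0781AABBCCDD1234');
// → { value: '3', string: '0781AABBCCDD1234' }
// guid and serial are then assembled from the idVendor/idProduct values and slices of iSerial.string
```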
@@ -3,13 +3,13 @@
* Written by: Alexis Tyler
*/

import { uptime } from 'os';
import { uptime } from 'node:os';
import si from 'systeminformation';
import { CoreContext, CoreResult } from '../../types';
import { ensurePermission } from '../../utils';

// Get uptime on boot and convert to date
const bootTimestamp = new Date(new Date().getTime() - (uptime() * 1000));
const bootTimestamp = new Date(Date.now() - (uptime() * 1000));

/**
* Get OS info

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import fs from 'fs';
import fs from 'node:fs';
import semver from 'semver';
import { paths } from '../../paths';
import { CacheManager } from '../../cache-manager';

@@ -37,8 +37,12 @@ export const getVmsCount = async function (context: CoreContext): Promise<Result

try {
const hypervisor = await getHypervisor();
const activeDomains = await hypervisor.connectListAllDomains(ConnectListAllDomainsFlags.ACTIVE) as [];
const inactiveDomains = await hypervisor.connectListAllDomains(ConnectListAllDomainsFlags.INACTIVE) as [];
if (!hypervisor) {
throw new Error('No Hypervisor');
}

const activeDomains = await hypervisor.connectListAllDomains(ConnectListAllDomainsFlags.ACTIVE) as string[];
const inactiveDomains = await hypervisor.connectListAllDomains(ConnectListAllDomainsFlags.INACTIVE) as string[];
const installed = activeDomains.length + inactiveDomains.length;
const started = activeDomains.length;

@@ -58,6 +62,6 @@ export const getVmsCount = async function (context: CoreContext): Promise<Result
installed,
started
}
}
};
}
};

@@ -15,9 +15,9 @@ interface Result extends CoreResult {
}

/**
* Get emhttpd service info.
* Get emhttp service info.
*/
export const getEmhttpdService = async (context: CoreContext): Promise<Result> => {
export const getEmhttpService = async (context: CoreContext): Promise<Result> => {
const { user } = context;

// Check permissions
@@ -1,4 +1,4 @@
// Created from 'create-ts-index'

export * from './get-emhttpd';
export * from './get-emhttp';
export * from './get-unraid-api';

@@ -38,7 +38,7 @@ export const getShare = async function (context: Context): Promise<Result> {
const share = [
userShare,
diskShare
].filter(_ => _)[0];
].find(_ => _);

if (!share) {
throw new AppError('No share found with that name.', 404);

@@ -32,7 +32,7 @@ export const addRole = async (context: Context): Promise<CoreResult> => {
const { name } = params;
const missingFields = hasFields(params, ['name']);

if (missingFields.length !== 0) {
if (missingFields.length > 0) {
throw new FieldMissingError(missingFields[0]);
}

@@ -30,7 +30,7 @@ export const deleteUser = async (context: Context): Promise<CoreResult> => {
const { name } = params;
const missingFields = hasFields(params, ['name']);

if (missingFields.length !== 0) {
if (missingFields.length > 0) {
// Just throw the first error
throw new FieldMissingError(missingFields[0]);
}

@@ -59,7 +59,7 @@ export const getDomains = async (context: CoreContext): Promise<CoreResult> => {
text: `Defined domains: ${JSON.stringify(activeDomainNames, null, 2)}\nActive domains: ${JSON.stringify(inactiveDomainNames, null, 2)}`,
json: resolvedDomains
};
} catch (error: unknown) {
} catch {
// If we hit an error expect libvirt to be offline
return {
text: `Defined domains: ${JSON.stringify([], null, 2)}\nActive domains: ${JSON.stringify([], null, 2)}`,

@@ -34,7 +34,7 @@ export class EmailNotifier extends Notifier {
}

send(options: SendOptions) {
const { type = 'generic', title = 'Unraid Server Notification' } = options;
const { type = 'generic', title = 'Unraid Server Notification', data, ...renderOptions } = options;
const { to, from, replyTo, level } = this;
// Only show info when in debug
const silent = level !== 'debug';
@@ -50,7 +50,8 @@ export class EmailNotifier extends Notifier {

// Render template
this.template = Object.keys(templates).includes(type) ? templates[type] : templates.generic;
const html = this.render({ ...options, json: JSON.stringify(options.data, null, 2) }, this.helpers);
// eslint-disable-next-line unicorn/consistent-destructuring
const html = this.render({ type, title, ...renderOptions, json: JSON.stringify(data, null, 2) }, this.helpers);

return sendMail({
from,

@@ -25,6 +25,8 @@ export interface NotifierSendOptions {
computed: LooseObject;
}

const generateHelper = (func: (text: string) => string) => (text: string, render: (text: string) => string) => func(render(text));

/**
* Base notifier.
* @param Alert level.
@@ -70,6 +72,6 @@ export class Notifier {
* @param func Function to be wrapped.
*/
generateHelper(func: (text: string) => string) {
return () => (text: string, render: (text: string) => string) => func(render(text));
return generateHelper(func);
}
}

@@ -31,8 +31,8 @@ export class UnraidNotifier extends HttpNotifier {
*/
async send(options: NotifierSendOptions) {
const { endpoint, transport } = this;
const { type = 'generic', title = 'Unraid Server Notification' } = options;
const { ...body } = options.data;
const { type = 'generic', title = 'Unraid Server Notification', data } = options;
const { ...body } = data;

const headers = {
Accept: 'application/json, text/plain, */*',

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import path from 'path';
import path from 'node:path';
import mm from 'micromongo';
import { paths } from '../paths';
import { parseConfig } from '../utils/misc';
@@ -60,6 +60,7 @@ class Devices extends ArrayState {
}

find(query?: LooseObject): Device[] {
// eslint-disable-next-line unicorn/no-array-callback-reference
return super.find(query);
}

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import path from 'path';
import path from 'node:path';
import mm from 'micromongo';
import { paths } from '../paths';
import { LooseObject, IniStringBoolean, CommaSeparatedString } from '../types';
@@ -105,6 +105,7 @@ class Network extends ArrayState {
}

find(query?: LooseObject): Network[] {
// eslint-disable-next-line unicorn/no-array-callback-reference
return super.find(query);
}

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import path from 'path';
import path from 'node:path';
import { paths } from '../paths';
import { ArrayState } from './state';
import { parseConfig } from '../utils/misc';
@@ -62,6 +62,7 @@ class NfsSec extends ArrayState {
}

find(query?: LooseObject): SecIni[] {
// eslint-disable-next-line unicorn/no-array-callback-reference
return super.find(query);
}
}

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import path from 'path';
import path from 'node:path';
import mm from 'micromongo';
import { paths } from '../paths';
import { parseConfig } from '../utils/misc';
@@ -25,8 +25,8 @@ const parse = (state: SharesIni[]): Share[] => {
.map(([_, item]) => {
const { free, size, include, exclude, useCache, ...rest } = item;
const share: Share = {
free: parseInt(free, 10),
size: parseInt(size, 10),
free: Number.parseInt(free, 10),
size: Number.parseInt(size, 10),
include: include.split(',').filter(_ => _),
exclude: exclude.split(',').filter(_ => _),
cache: useCache === 'yes',
@@ -73,6 +73,7 @@ class Shares extends ArrayState {
}

find(query?: LooseObject): Share[] {
// eslint-disable-next-line unicorn/no-array-callback-reference
return super.find(query);
}

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import path from 'path';
import path from 'node:path';
import mm from 'micromongo';
import { paths } from '../paths';
import { Slot } from '../types/states';
@@ -111,6 +111,7 @@ class Slots extends ArrayState {
}

find(query?: LooseObject): Slot[] {
// eslint-disable-next-line unicorn/no-array-callback-reference
return super.find(query);
}

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import path from 'path';
import path from 'node:path';
import { paths } from '../paths';
import { ArrayState } from './state';
import { parseConfig } from '../utils/misc';
@@ -94,6 +94,7 @@ class SmbSec extends ArrayState {
}

find(query?: Record<string, unknown>): any[] {
// eslint-disable-next-line unicorn/no-array-callback-reference
return super.find(query);
}
}

@@ -69,6 +69,7 @@ export class ArrayState extends State {
}

find(query: LooseObject = {}) {
// eslint-disable-next-line unicorn/no-array-callback-reference, unicorn/no-array-method-this-argument
return mm.find(this.data, query);
}

@@ -4,7 +4,7 @@
*/

import mm from 'micromongo';
import path from 'path';
import path from 'node:path';
import { paths } from '../paths';
import { User } from '../types/states';
import { LooseObject } from '../types';
@@ -33,7 +33,7 @@ const parseUser = (state: UserIni): User => {
return user;
};

const parse = (states: UserIni[]): User[] => Object.values(states).map(parseUser);
const parse = (states: UserIni[]): User[] => Object.values(states).map(state => parseUser(state));

class Users extends ArrayState {
private static instance: Users;
@@ -77,6 +77,7 @@ class Users extends ArrayState {
}

find(query?: LooseObject): User[] {
// eslint-disable-next-line unicorn/no-array-callback-reference, unicorn/no-array-method-this-argument
return mm.find(this.data, query);
}

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import path from 'path';
import path from 'node:path';
import { paths } from '../paths';
import { Var } from '../types/states';
import { IniStringBooleanOrAuto, IniStringBoolean } from '../types/ini';

@@ -22,11 +22,15 @@ export interface CoreContext {
readonly user: Readonly<User>;
}

type AnyJson = boolean | number | string | null | JsonArray | JsonMap;
type JsonMap = Record<string, AnyJson>;
interface JsonArray extends Array<AnyJson> {}

/**
* Result object
*/
export interface CoreResult {
json?: Record<string, unknown> | Array<Record<string, unknown>> | null;
export interface CoreResult<Json = AnyJson> {
json?: Json;
text?: string;
html?: string;
}
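With `CoreResult` made generic, individual modules can type their `json` payload instead of the old `Record<string, unknown>` shape; a sketch reusing the VM-count fields seen earlier:

```ts
interface VmCount { installed: number; started: number }

const result: CoreResult<VmCount> = {
	text: 'VMs: 3 installed, 1 started',
	json: { installed: 3, started: 1 }
};
```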
@@ -1,12 +1,12 @@
// If it's "true", "yes" or "1" then it's true otherwise it's false
export const toBoolean = (value: string): boolean => ['true', 'yes', '1'].includes(value?.toLowerCase().trim());
export const toNumber = (value: string): number => parseInt(value, 10);
export const toNumber = (value: string): number => Number.parseInt(value, 10);

type BooleanString = 'true' | 'false';

export const boolToString = (bool: boolean): BooleanString => {
if (typeof bool === 'boolean') {
throw new Error('Incorrect type, only true/false is allowed.');
throw new TypeError('Incorrect type, only true/false is allowed.');
}

return bool ? 'true' : 'false';
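A quick usage sketch for the converters above (the sample values are mine):

```ts
toBoolean('Yes ');  // true  — lower-cased and trimmed before the membership check
toBoolean('0');     // false
toNumber('8500');   // 8500 via Number.parseInt(value, 10)
```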
@@ -4,7 +4,7 @@
*/

import request from 'request-promise-native';
import { envs } from '../../environments';
import { environmentVariables } from '../../environments';
import { coreLogger } from '../../log';
import { catchHandlers } from '..';
import { paths } from '../../paths';
@@ -12,7 +12,7 @@ import { varState } from '../../states';
import { LooseObject } from '../../types';

const socketPath = paths.get('emhttpd-socket')!;
const dryRun = envs.DRY_RUN;
const dryRun = environmentVariables.DRY_RUN;

/**
* Run a command with emcmd.

@@ -20,11 +20,17 @@ windowPolyFill.register(false);
global.XMLHttpRequest = xhr2;
global.EventSource = EventSource;

// eslint-disable-next-line @typescript-eslint/no-var-requires
const NchanSubscriber = require('nchan');
let nchan: any;
const getNchan = async () => {
if (nchan) {
return nchan;
}

nchan = await import('nchan');
};

const getSubEndpoint = () => {
const httpPort: string = states.varState.data?.port;
const httpPort = states.varState.data?.port;
return `http://localhost:${httpPort}/sub`;
};

@@ -42,34 +48,37 @@ const endpointToStateMapping = {
var: states.varState
};

const subscribe = async (endpoint: string) => new Promise<void>(resolve => {
const sub = new NchanSubscriber(`${getSubEndpoint()}/${endpoint}`, {
subscriber: 'eventsource'
const subscribe = async (endpoint: string) => {
const NchanSubscriber = await getNchan();
return new Promise<void>(resolve => {
const sub = new NchanSubscriber(`${getSubEndpoint()}/${endpoint}`, {
subscriber: 'eventsource'
});

sub.on('connect', function (_event) {
nchanLogger.debug('Connected!');
resolve();
});

sub.on('message', function (message, _messageMetadata) {
try {
const state = parseConfig({
file: message,
type: 'ini'
});

// Update state
endpointToStateMapping[endpoint].parse(state);
} catch {}
});

sub.on('error', function (error, error_description) {
nchanLogger.error('Error: "%s" \nDescription: "%s"', error, error_description);
});

sub.start();
});

sub.on('connect', function (_event) {
nchanLogger.debug('Connected!');
resolve();
});

sub.on('message', function (message, _messageMetadata) {
try {
const state = parseConfig({
file: message,
type: 'ini'
});

// Update state
endpointToStateMapping[endpoint].parse(state);
} catch {}
});

sub.on('error', function (error, error_description) {
nchanLogger.error('Error: "%s" \nDescription: "%s"', error, error_description);
});

sub.start();
});
};

export const subscribeToNchanEndpoint = async (endpoint: string) => {
if (!Object.keys(endpointToStateMapping).includes(endpoint)) {
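The `getNchan` change above swaps a top-level `require` for a lazily loaded, memoised dynamic import. The general shape of that pattern, shown in isolation (a sketch, not the repo's exact helper):

```ts
let cached: unknown;

// Load a module on first use and reuse the cached result afterwards
const loadOnce = async (specifier: string) => {
	if (cached) {
		return cached;
	}

	cached = await import(specifier);
	return cached;
};
```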
@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import { performance } from 'perf_hooks';
import { performance } from 'node:perf_hooks';
import { log } from '../../log';

const timers = new Map();

@@ -1,4 +1,4 @@
export const attemptJSONParse = (text: string, fallback: any = undefined) => {
export const attemptJSONParse = (text: string, fallback: any) => {
try {
return JSON.parse(text);
} catch {

@@ -1,6 +1,6 @@
import { readFileSync } from 'fs';
import { readFileSync } from 'node:fs';

export const attemptReadFileSync = (path: string, fallback: any = undefined) => {
export const attemptReadFileSync = (path: string, fallback?: any) => {
try {
return readFileSync(path, 'utf-8');
} catch {

@@ -12,6 +12,7 @@ import { coreLogger } from '../../log';
export const exitApp = (error?: Error, exitCode?: number) => {
if (!error) {
// Kill application immediately
// eslint-disable-next-line unicorn/no-process-exit
process.exit(exitCode ?? 0);
}

@@ -32,6 +33,7 @@ export const exitApp = (error?: Error, exitCode?: number) => {
coreLogger.error(error);

// Kill application
// eslint-disable-next-line unicorn/no-process-exit
process.exit(exitCode);
}
};

@@ -1,5 +1,5 @@
import btoa from 'btoa';
import { promises } from 'fs';
import { promises } from 'node:fs';
import { varState } from '../../states';

// Get key file

@@ -1,4 +1,4 @@
import fs from 'fs';
import fs from 'node:fs';
import { paths } from '../../paths';
import { CacheManager } from '../../cache-manager';
import { FileMissingError } from '../../errors';

@@ -22,6 +22,7 @@ export const globalErrorHandler = (error: Error) => {
console.error(error);

// Kill application
// eslint-disable-next-line unicorn/no-process-exit
process.exit(1);
}
};

@@ -3,7 +3,7 @@
* Written by: Alexis Tyler
*/

import fs from 'fs';
import fs from 'node:fs';
import { read as multiIniRead, Parser as MultiIniParser } from 'multi-ini';
import ini from 'ini';
import camelCaseKeys from 'camelcase-keys';
@@ -110,7 +110,7 @@ export const parseConfig = <T>(options: Options): T => {
}

// If multi-ini failed try ini
if (fileContents.length >= 1 && Object.keys(data).length === 0) {
if (fileContents.length > 0 && Object.keys(data).length === 0) {
data = ini.parse(fileContents);
}

@@ -3,29 +3,29 @@ import FormData from 'form-data';
import { varState } from '../../states';
import { AppError } from '../../errors';

const sendFormToKeyServer = async (url: string, data: Record<string, unknown>) => {
if (!data) {
throw new AppError('Missing data field.');
}

// Create form
const body = new FormData();
for (const [key, value] of Object.entries(data)) {
if (value !== undefined) {
body.append(key, String(value));
}
}

// Send form
return fetch(url, {
method: 'POST',
body
});
};

export const validateApiKey = async (apiKey: string) => {
const KEY_SERVER_KEY_VERIFICATION_ENDPOINT = process.env.KEY_SERVER_KEY_VERIFICATION_ENDPOINT ?? 'https://keys.lime-technology.com/validate/apikey';

const sendFormToKeyServer = async (url: string, data: Record<string, unknown>) => {
if (!data) {
throw new AppError('Missing data field.');
}

// Create form
const body = new FormData();
Object.entries(data).forEach(([key, value]) => {
if (value !== undefined) {
body.append(key, String(value));
}
});

// Send form
return fetch(url, {
method: 'POST',
body
});
};

// Send apiKey, etc. to key-server for verification
const response = await sendFormToKeyServer(KEY_SERVER_KEY_VERIFICATION_ENDPOINT, {
guid: varState.data.flashGuid,

@@ -3,10 +3,11 @@
* Written by: Alexis Tyler
*/

import path from 'path';
import path from 'node:path';
import execa from 'execa';
import { PhpError, FileMissingError } from '../../errors';
import { LooseObject, LooseStringObject } from '../../types';
import { fileURLToPath } from 'node:url';

/**
* Encode GET/POST params.
@@ -19,7 +20,7 @@ const encodeParameters = (parameters: LooseObject) => {
// Join query params together
return Object.entries(parameters).map(kv => {
// Encode each section and join
return kv.map(encodeURIComponent).join('=');
return kv.map(keyValuePair => encodeURIComponent(keyValuePair)).join('=');
}).join('&');
};

@@ -42,9 +43,10 @@ export const phpLoader = async (options: Options) => {
const options_ = [
'./wrapper.php',
method,
`${file}${Object.keys(query).length >= 1 ? ('?' + encodeParameters(query)) : ''}`,
`${file}${Object.keys(query).length > 0 ? ('?' + encodeParameters(query)) : ''}`,
encodeParameters(body)
];
const __dirname = path.dirname(fileURLToPath(import.meta.url));

return execa('php', options_, { cwd: __dirname })
.then(({ stdout }) => {

@@ -12,5 +12,5 @@ import { LooseObject } from '../../types';
*/
export const hasFields = (object: LooseObject, fields: string[]) => {
const keys = Object.keys(object);
return keys.length >= 1 ? fields.filter(field => !keys.includes(field)) : fields;
return keys.length > 0 ? fields.filter(field => !keys.includes(field)) : fields;
};
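And a quick illustration of `hasFields`, which returns the field names missing from the object (toy inputs of my own):

```ts
hasFields({ name: 'alexis' }, ['name', 'password']); // ['password']
hasFields({}, ['name', 'password']);                 // ['name', 'password'] — an empty object misses everything
```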
@@ -25,9 +25,9 @@ export const sanitizeVendor = (vendorName: string): string => {
|
||||
|
||||
// Remove un-needed text
|
||||
const junk = [' Corporation', ' Semiconductor ', ' Technology Group Ltd.', ' System, Inc.', ' Systems, Inc.', ' Co., Ltd.', ', Ltd.', ', Ltd', ', Inc.'];
|
||||
junk.forEach(item => {
|
||||
for (const item of junk) {
|
||||
vendor = vendor.replace(item, '');
|
||||
});
|
||||
}
|
||||
|
||||
vendor = vendor.replace('Advanced Micro Devices', 'AMD');
|
||||
vendor = vendor.replace('Samsung Electronics Co.', 'Samsung');
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
* Written by: Alexis Tyler
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { Hypervisor } from '@vmngr/libvirt';
|
||||
import { watch } from 'chokidar';
|
||||
import { log } from '../../log';
|
||||
@@ -33,7 +33,8 @@ libvirtDirWatcher.on('all', async (event, fileName) => {
|
||||
|
||||
// Kill connection
|
||||
await hypervisor.connectClose().catch(() => {
|
||||
return undefined;
|
||||
// Ignore error
|
||||
// @todo: Maybe this is what's causing vms to not start?
|
||||
});
|
||||
|
||||
hypervisor = null;
|
||||
@@ -58,7 +59,7 @@ export const getHypervisor = async (useCache = true) => {
|
||||
// Check if libvirt service is running and then connect
|
||||
const running = fs.existsSync(path.join(libvirtDir, 'libvirtd.pid'));
|
||||
if (!running) {
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
|
||||
hypervisor = new Hypervisor({ uri });
|
||||
@@ -124,7 +125,7 @@ const watchLibvirt = async (useCache = true) => {
|
||||
// If the result is the same as the cache wait 5s then retry
|
||||
if (JSON.stringify(cachedDomains) === JSON.stringify(resolvedDomains)) {
|
||||
log.debug('libvirt: No changes detected.');
|
||||
await sleep(5_000);
|
||||
await sleep(5000);
|
||||
return watchLibvirt();
|
||||
}
|
||||
|
||||
@@ -147,18 +148,18 @@ const watchLibvirt = async (useCache = true) => {
|
||||
|
||||
log.debug('libvirt: Published to "%s" with %j', 'vms', data);
|
||||
|
||||
await sleep(1_000);
|
||||
await sleep(1000);
|
||||
return watchLibvirt();
|
||||
} catch (error: unknown) {
|
||||
// We need to try and reconnect
|
||||
if (`${error}`.includes('invalid connection pointer')) {
|
||||
if (String(error).includes('invalid connection pointer')) {
|
||||
log.warn('Reconnecting to libvirt socket...');
|
||||
await sleep(5_000);
|
||||
await sleep(5000);
|
||||
return watchLibvirt(false);
|
||||
}
|
||||
|
||||
log.error('Failed watching libvirt with "%s"', error);
|
||||
await sleep(5_000);
|
||||
await sleep(5000);
|
||||
return watchLibvirt();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -7,7 +7,7 @@ import execa from 'execa';
|
||||
import { cleanStdout } from '..';
|
||||
import { PciDevice } from '../../types';
|
||||
|
||||
const regex = new RegExp(/^(?<id>\S+) "(?<type>[^"]+) \[(?<typeid>[a-f\d]{4})]" "(?<vendorname>[^"]+) \[(?<vendorid>[a-f\d]{4})]" "(?<productname>[^"]+) \[(?<productid>[a-f\d]{4})]"/);
|
||||
const regex = new RegExp(/^(?<id>\S+) "(?<type>[^"]+) \[(?<typeid>[\da-f]{4})]" "(?<vendorname>[^"]+) \[(?<vendorid>[\da-f]{4})]" "(?<productname>[^"]+) \[(?<productid>[\da-f]{4})]"/);
|
||||
|
||||
/**
|
||||
* Get pci devices.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import prettyBytes from 'pretty-bytes';
|
||||
import { coreLogger } from '../log';
|
||||
|
||||
|
||||
@@ -38,15 +38,13 @@ export const myservers = () => {

// If we have one enable/disable it. If this is
// missing it's likely we shipped without Sentry
// initialised. This would be done for a reason!
if (sentryClient) {
// Check if the value changed
if (sentryClient.getOptions().enabled !== isEnabled) {
sentryClient.getOptions().enabled = isEnabled;
// initialized. This would be done for a reason!
if (sentryClient && // Check if the value changed
sentryClient.getOptions().enabled !== isEnabled) {
sentryClient.getOptions().enabled = isEnabled;

// Log for debugging
coreLogger.debug('%s crash reporting!', isEnabled ? 'Enabled' : 'Disabled');
}
// Log for debugging
coreLogger.debug('%s crash reporting!', isEnabled ? 'Enabled' : 'Disabled');
}

// @todo: add cfg files similar to states
@@ -71,7 +69,7 @@ export const myservers = () => {
});

// Extra origins file has likely updated
extraOriginsWatcher.on('all', async event => {
extraOriginsWatcher.on('all', async _event => {
origins.extra = extraOriginPath ? attemptJSONParse(attemptReadFileSync(extraOriginPath, ''), []) : [];
});

@@ -90,8 +88,8 @@ export const myservers = () => {
cert.hash = certPem ? pki.certificateFromPem(certPem)?.subject?.attributes?.[0]?.value as string : undefined;
});
},
stop() {
watchers.forEach(async watcher => watcher.close());
async stop() {
await Promise.all(watchers.map(async watcher => watcher.close()));
}
};
};
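
The same stop() rewrite appears again below for the plugins and states watchers, so it is worth spelling out once: forEach discards the promises returned by an async callback, so the old stop() returned before any chokidar watcher had finished closing, while mapping to promises and awaiting Promise.all makes shutdown wait for all of them and surfaces failures. A small sketch of the difference, where close() is any promise-returning cleanup:

type Closable = { close: () => Promise<void> };

// Fire-and-forget: returns immediately; the close() calls may still be in flight.
const stopWithoutWaiting = (watchers: Closable[]) => {
    watchers.forEach(async watcher => watcher.close());
};

// Resolves only once every watcher has closed; rejects if any close() fails.
const stop = async (watchers: Closable[]) => {
    await Promise.all(watchers.map(async watcher => watcher.close()));
};
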
@@ -49,8 +49,8 @@ export const plugins = () => {
// Save ref for cleanup
watchers.push(watcher);
},
stop() {
watchers.forEach(async watcher => watcher.close());
async stop() {
await Promise.all(watchers.map(async watcher => watcher.close()));
}
};
};
@@ -5,37 +5,37 @@

import { coreLogger, logger } from '../log';
import { pubsub } from '../pubsub';
import { getKeyFile, sleep } from '../utils';
import { getKeyFile } from '../utils';
import { bus } from '../bus';

export const keyFile = () => {
const listener = async (data: any) => {
// Log for debugging
coreLogger.debug('Var state updated, publishing registration event.');
const listener = async (data: any) => {
// Log for debugging
coreLogger.debug('Var state updated, publishing registration event.');

// Get key file
const keyFile = data.var.node.regFile ? await getKeyFile(data.var.node.regFile) : '';
const registration = {
guid: data.var.node.regGuid,
type: data.var.node.regTy.toUpperCase(),
state: data.var.node.regState,
keyFile: {
location: data.var.node.regFile,
contents: keyFile
}
};

logger.debug('Publishing %s to registration', JSON.stringify(registration, null, 2));

// Publish event
// This will end up going to the graphql endpoint
await pubsub.publish('registration', {
registration
}).catch(error => {
coreLogger.error('Failed publishing to "registration" with %s', error);
});
// Get key file
const keyFile = data.var.node.regFile ? await getKeyFile(data.var.node.regFile) : '';
const registration = {
guid: data.var.node.regGuid,
type: data.var.node.regTy.toUpperCase(),
state: data.var.node.regState,
keyFile: {
location: data.var.node.regFile,
contents: keyFile
}
};

logger.debug('Publishing %s to registration', JSON.stringify(registration, null, 2));

// Publish event
// This will end up going to the graphql endpoint
await pubsub.publish('registration', {
registration
}).catch(error => {
coreLogger.error('Failed publishing to "registration" with %s', error);
});
};

export const keyFile = () => {
return {
start() {
// Update registration when regTy, regCheck, etc changes

@@ -77,8 +77,8 @@ export const states = () => {
// Save ref for cleanup
watchers.push(watcher);
},
stop() {
watchers.forEach(async watcher => watcher.close());
async stop() {
await Promise.all(watchers.map(async watcher => watcher.close()));
}
};
};
@@ -16,7 +16,7 @@ interface Context extends CoreContext {
};
}

export default async (_: unknown, args: unknown, context: Context, info: any) => {
export default async (_: unknown, _arguments: unknown, context: Context, info: any) => {
const topLevelFields = Object.keys(graphqlFields(info));
const disks = await getDisks(context, { temperature: topLevelFields.includes('temperature') });
return disks.json;
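
Only the unused argument is renamed here, but the hunk shows a pattern worth noting: graphql-fields flattens the resolver's info object into a map of the requested fields, so the relatively expensive temperature lookup only happens when the query actually asks for it. A hedged sketch of that pattern — getDisks stands in for the real data source and is not the repo's implementation:

import graphqlFields from 'graphql-fields';
import type { GraphQLResolveInfo } from 'graphql';

// Placeholder fetcher: reading temperatures is assumed to be the costly part.
declare function getDisks(options: { temperature: boolean }): Promise<{ json: unknown }>;

export const disks = async (_: unknown, _arguments: unknown, info: GraphQLResolveInfo) => {
    // e.g. { name: {}, temperature: {} } for `{ disks { name temperature } }`
    const topLevelFields = Object.keys(graphqlFields(info));
    return (await getDisks({ temperature: topLevelFields.includes('temperature') })).json;
};
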
@@ -3,9 +3,9 @@
 * Written by: Alexis Tyler
 */

import { join } from 'path';
import { promises as fs, statSync, existsSync } from 'fs';
import { paths, log, graphqlLogger } from '../../../core';
import { join } from 'node:path';
import { promises as fs, statSync, existsSync } from 'node:fs';
import { paths, graphqlLogger } from '../../../core';

// Consts
const ONE_BYTE = 1;
@@ -125,7 +125,7 @@ export default async () => {
url: serverCase
}
};
} catch (error: unknown) {
} catch {
return {
case: states.couldNotReadImage
};
@@ -3,7 +3,7 @@
 * Written by: Alexis Tyler
 */

import { readFileSync } from 'fs';
import { readFileSync } from 'node:fs';
import { mergeTypeDefs } from '@graphql-tools/merge';

const files = [
@@ -3,7 +3,7 @@
 * Written by: Alexis Tyler
 */

import os from 'os';
import os from 'node:os';
import am from 'am';
import * as Sentry from '@sentry/node';
import exitHook from 'async-exit-hook';
@@ -12,14 +12,12 @@ import { core, states, coreLogger, log, apiManager, apiManagerLogger } from './c
import { server } from './server';
import { mothership } from './mothership/subscribe-to-servers';
import { startInternal, sockets } from './mothership';

// eslint-disable-next-line @typescript-eslint/no-var-requires
const { version } = require('../package.json') as { version: string };
import { version } from '../package.json';

// Send errors to server if enabled
Sentry.init({
dsn: process.env.SENTRY_DSN,
tracesSampleRate: 1.0,
tracesSampleRate: 1,
release: `unraid-api@${version}`,
environment: process.env.ENVIRONMENT ?? 'unknown',
serverName: os.hostname(),
@@ -172,6 +170,7 @@ am(async () => {
await Sentry.flush(5000);

// Kill application
// eslint-disable-next-line unicorn/no-process-exit
process.exit(1);
});
});
@@ -4,7 +4,6 @@ import { Serializer as IniSerializer } from 'multi-ini';
import { INTERNAL_WS_LINK, MOTHERSHIP_RELAY_WS_LINK } from '../consts';
import { apiManager } from '../core/api-manager';
import { log } from '../core/log';
// eslint-disable-next-line unicorn/prevent-abbreviations
import { varState } from '../core/states/var';
import packageJson from '../../package.json';
import { paths } from '../core/paths';
@@ -12,9 +11,7 @@ import { loadState } from '../core/utils/misc/load-state';
import { subscribeToServers } from './subscribe-to-servers';

export const sockets = {
// eslint-disable-next-line unicorn/no-null
internal: null as GracefulWebSocket | null,
// eslint-disable-next-line unicorn/no-null
relay: null as GracefulWebSocket | null
};
let internalOpen = false;
@@ -43,7 +43,7 @@ process.on('SIGTERM', () => {
 * Run a module.
 */
export const run = async (channel: string, mutation: string, options: RunOptions) => {
const timestamp = new Date().getTime();
const timestamp = Date.now();
const {
node,
moduleToRun,
@@ -70,7 +70,7 @@ export const run = async (channel: string, mutation: string, options: RunOptions
coreLogger.silly(`run:${moduleToRun.name} %j`, result.json);

// Save result
await publish(channel, mutation, result.json as any);
await publish(channel, mutation, result.json);

// Bail as we're done looping
if (!loop || loop === 0) {
@@ -78,7 +78,7 @@ export const run = async (channel: string, mutation: string, options: RunOptions
}

// If we haven't waited long enough wait a little more
const timeTaken = (new Date().getTime() - timestamp);
const timeTaken = Date.now() - timestamp;
const minimumTime = 1000;
if (timeTaken < minimumTime) {
await sleep(minimumTime - timeTaken);
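
Date.now() is the drop-in replacement for new Date().getTime() (unicorn/prefer-date-now); the surrounding logic pads each run out to a minimum duration before looping again. A standalone sketch of that pacing pattern, with doWork as a placeholder:

import { setTimeout as sleep } from 'node:timers/promises';

declare function doWork(): Promise<void>;

const runPaced = async (minimumTime = 1000) => {
    const timestamp = Date.now();

    await doWork();

    // If the work finished early, wait out the remainder so iterations are
    // spaced at least `minimumTime` ms apart.
    const timeTaken = Date.now() - timestamp;
    if (timeTaken < minimumTime) {
        await sleep(minimumTime - timeTaken);
    }
};
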
@@ -3,15 +3,15 @@
 * Written by: Alexis Tyler
 */

import fs from 'fs';
import net from 'net';
import path from 'path';
import fs from 'node:fs';
import net from 'node:net';
import path from 'node:path';
import execa from 'execa';
import cors from 'cors';
import stoppable from 'stoppable';
import chokidar from 'chokidar';
import express from 'express';
import http from 'http';
import http from 'node:http';
import WebSocket from 'ws';
import { pki } from 'node-forge';
import { ApolloServer } from 'apollo-server-express';
@@ -83,7 +83,7 @@ const getAllowedOrigins = (): string[] => {
const webuiHTTPSPort = (varState.data.portssl ?? 443) === 443 ? '' : varState.data.portssl;

// Get wan https port
const wanHTTPSPort = parseInt(myServersConfig?.remote?.wanport ?? '', 10) === 443 ? '' : myServersConfig?.remote?.wanport;
const wanHTTPSPort = Number.parseInt(myServersConfig?.remote?.wanport ?? '', 10) === 443 ? '' : myServersConfig?.remote?.wanport;

// Check if wan access is enabled
const wanAccessEnabled = myServersConfig?.remote?.wanaccess === 'yes';
@@ -92,7 +92,7 @@ const getAllowedOrigins = (): string[] => {
return [...new Set([
// Localhost - Used for GUI mode
`http://localhost${webuiHTTPPort ? `:${webuiHTTPPort}` : ''}`,

// IP
`http://${localIp}${webuiHTTPPort ? `:${webuiHTTPPort}` : ''}`,
`https://${localIp}${webuiHTTPSPort ? `:${webuiHTTPSPort}` : ''}`,
@@ -210,7 +210,7 @@ const httpServer = http.createServer(app);
const stoppableServer = stoppable(httpServer);

// Port is a UNIX socket file
if (isNaN(parseInt(port, 10))) {
if (Number.isNaN(Number.parseInt(port, 10))) {
stoppableServer.on('listening', () => {
// Set permissions
fs.chmodSync(port, 660);
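
Number.parseInt is identical to the global parseInt, and Number.isNaN only differs in that it skips coercion, which does not matter here because its input is already a number — so this is purely the unicorn/prefer-number-properties style. The check itself is how the server decides whether PORT is a numeric TCP port or a UNIX socket path (the README's /var/run/unraid-api.sock case). A simplified sketch of that branch, not the repo's actual listen code:

import fs from 'node:fs';
import http from 'node:http';

const server = http.createServer();
const port = process.env.PORT ?? '/var/run/unraid-api.sock';

// parseInt('/var/run/…', 10) is NaN, so any non-numeric PORT is treated as a socket path.
if (Number.isNaN(Number.parseInt(port, 10))) {
    server.on('listening', () => {
        // The socket is a file on disk; tighten its permissions once it exists.
        fs.chmodSync(port, 0o660);
    });
    server.listen(port);
} else {
    server.listen(Number.parseInt(port, 10));
}
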
@@ -224,16 +224,16 @@ if (isNaN(parseInt(port, 10))) {

// Check if port is unix socket or numbered port
// If it's a numbered port then throw
if (!isNaN(parseInt(port, 10))) {
if (!Number.isNaN(Number.parseInt(port, 10))) {
throw error;
}

// Check if the process that made this file is still alive
const pid = await execa.command(`lsof -t ${port}`)
.then(output => {
const pids = cleanStdout(output).split('\n');
return pids[0];
}).catch(() => undefined);
.then(output => cleanStdout(output).split('\n')[0])
.catch(() => {
// Do nothing
});

// Try to kill it?
if (pid) {
@@ -317,10 +317,10 @@ export const server = {
wsServer.close();

// Unlink socket file
if (isNaN(parseInt(port, 10))) {
if (Number.isNaN(Number.parseInt(port, 10))) {
try {
fs.unlinkSync(port);
} catch { }
} catch {}
}

// Run callback
1
app/types/index.d.ts
vendored
@@ -1 +0,0 @@
declare module '*.json';
14
package.json
@@ -1,31 +1,31 @@
{
"name": "@unraid/api",
"version": "2.24.0",
"main": "dist/index.cjs",
"main": "dist/index.js",
"repository": "git@github.com:unraid/api.git",
"author": "Alexis Tyler <xo@wvvw.me> (https://wvvw.me/)",
"license": "UNLICENSED",
"type": "module",
"scripts": {
"build": "npm run build-app && npm run build-cli && npm run copy-schemas",
"build-app": "npx tsup ./app/index.ts",
"build-cli": "npx tsup ./app/cli.ts",
"build-binary-step-1": "nexe ./dist/cli.cjs --make=\"-j$(nproc 2> /dev/null || echo 1)\" -r './dist/**/*' -r './node_modules' && mv ./cli ./unraid-api && echo '✔ Binary built: ./unraid-api'",
"build-app": "npx tsup ./app/index.ts --format esm",
"build-cli": "npx tsup ./app/cli.ts --format esm",
"build-binary-step-1": "nexe ./dist/cli.js --make=\"-j$(nproc 2> /dev/null || echo 1)\" -r './dist/**/*' -r './node_modules' && mv ./cli ./unraid-api && echo '✔ Binary built: ./unraid-api'",
"build-binary-step-2": "rm -rf ./node_modules && rm -rf ./dist && echo '✔ Source files deleted'",
"build-binary": "npm run build-binary-step-1 && npm run build-binary-step-2",
"copy-schemas": "cpx app/**/*.graphql dist/types",
"clean": "modclean --no-progress --run --path .",
"commit": "npx git-cz",
"lint": "eslint app/**/*.ts",
"lint": "eslint app",
"lint:quiet": "eslint --quiet",
"lint:fix": "eslint --fix",
"test": "nyc ava",
"cover": "npm run cover:types && npm run cover:unit && npm run cover:report",
"cover:types": "typescript-coverage-report && tsup ./app/cli.ts --dts",
"cover:types": "typescript-coverage-report && tsup ./app/cli.ts --dts --format esm",
"cover:unit": "nyc --silent npm run test",
"cover:report": "nyc report --reporter=lcov --reporter=text",
"patch": "npm-run-all patch:**",
"patch:subscriptions-transport-ws": "node ./.scripts/patches/subscriptions-transport-ws.cjs",
"patch:subscriptions-transport-ws": "node ./.scripts/patches/subscriptions-transport-ws.js",
"release": "standard-version",
"update-bundle-dependencies": "bundle-dependencies update"
},
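
Taken together, "type": "module", the tsup --format esm flags and the .cjs → .js renames switch the published build from CommonJS to ES modules, which is also why the README and the switch-env tests now invoke dist/cli.js. Purely as an illustration, the same flags expressed as a hypothetical tsup.config.ts (not present in the repo) would look roughly like:

import { defineConfig } from 'tsup';

// Sketch only: this commit keeps driving tsup from the package.json scripts instead.
export default defineConfig({
    entry: ['app/index.ts', 'app/cli.ts'],
    format: ['esm'], // mirrors the --format esm flag added above
    outDir: 'dist'
});
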
@@ -36,7 +36,7 @@ test.serial('Loads production when no env is set', async t => {
writeFileSync(PATHS_MYSERVERS_ENV, '');

// Run 'switch-env'
const { stdout: output } = await exec(`PATHS_MYSERVERS_ENV=${PATHS_MYSERVERS_ENV} PATHS_UNRAID_API_BASE=${PATHS_UNRAID_API_BASE} ${process.execPath} ./dist/cli.cjs switch-env`);
const { stdout: output } = await exec(`PATHS_MYSERVERS_ENV=${PATHS_MYSERVERS_ENV} PATHS_UNRAID_API_BASE=${PATHS_UNRAID_API_BASE} ${process.execPath} ./dist/cli.js switch-env`);

// Split the lines
const lines = output.split('\n');
@@ -62,7 +62,7 @@ test.serial('Loads production when switching from staging', async t => {
writeFileSync(PATHS_MYSERVERS_ENV, 'env="staging"');

// Run 'switch-env'
const { stdout: output } = await exec(`PATHS_MYSERVERS_ENV=${PATHS_MYSERVERS_ENV} PATHS_UNRAID_API_BASE=${PATHS_UNRAID_API_BASE} ${process.execPath} ./dist/cli.cjs switch-env`);
const { stdout: output } = await exec(`PATHS_MYSERVERS_ENV=${PATHS_MYSERVERS_ENV} PATHS_UNRAID_API_BASE=${PATHS_UNRAID_API_BASE} ${process.execPath} ./dist/cli.js switch-env`);

// Split the lines
const lines = output.split('\n');
@@ -87,7 +87,7 @@ test.serial('Loads staging when switching from production', async t => {
writeFileSync(PATHS_MYSERVERS_ENV, 'env="production"');

// Run 'switch-env'
const { stdout: output } = await exec(`PATHS_MYSERVERS_ENV=${PATHS_MYSERVERS_ENV} PATHS_UNRAID_API_BASE=${PATHS_UNRAID_API_BASE} ${process.execPath} ./dist/cli.cjs switch-env`);
const { stdout: output } = await exec(`PATHS_MYSERVERS_ENV=${PATHS_MYSERVERS_ENV} PATHS_UNRAID_API_BASE=${PATHS_UNRAID_API_BASE} ${process.execPath} ./dist/cli.js switch-env`);

// Split the lines
const lines = output.split('\n');
@@ -11,7 +11,7 @@
/* Basic Options */
// "incremental": true, /* Enable incremental compilation */
"target": "esnext", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "esnext", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"module": "ES2020", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
// "lib": [], /* Specify library files to be included in the compilation. */
"allowJs": false, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
@@ -21,7 +21,7 @@
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./dist", /* Redirect output structure to the directory. */
"rootDir": "./app", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"rootDirs": ["./app", "./test"], /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
"removeComments": true, /* Do not emit comments to output. */
@@ -73,6 +73,6 @@

/* Advanced Options */
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */,
"resolveJsonModule": false
"resolveJsonModule": true
}
}
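
Flipping resolveJsonModule to true (together with module: ES2020) is what allows the earlier index.ts hunk to swap the require('../package.json') cast, and the now-deleted declare module '*.json' shim, for a typed named import. A minimal sketch of the resulting pattern under those compiler options:

// With "resolveJsonModule": true the compiler type-checks the JSON,
// so `version` is inferred as a string instead of needing a cast.
import { version } from '../package.json';

console.log(`unraid-api@${version}`);
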