Feat/local-plugin (#1125)

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit


• New Features  
 - Enhanced the login experience with improved session management and
two-factor authentication.
 - Introduced a comprehensive README for the Unraid Plugin Builder,
detailing development workflows and commands.

• Chores  
 - Streamlined build, packaging, and deployment processes with updated
dependency and environment configurations.
 - Updated Docker configurations to support pnpm as the package manager.
 - Added new environment variables for better configuration management.
 - Introduced new scripts for improved build and packaging processes.  

• Tests  
 - Removed outdated test cases and simplified test setups.  

• Refactor  
 - Modernized internal code structure and asynchronous handling for
improved overall performance.
 - Transitioned imports from lodash to lodash-es for better module
handling.
 - Updated environment variable management and configuration settings.  
 - Enhanced the build script for improved deployment processes.  
 - Updated the notification handling structure to improve efficiency.

<!-- end of auto-generated comment: release notes by coderabbit.ai -->
This commit is contained in:
Eli Bosley
2025-02-11 11:45:26 -05:00
committed by GitHub
parent 59d6c1b678
commit 753f1588b8
47 changed files with 13722 additions and 19279 deletions
+115 -89
View File
@@ -34,66 +34,106 @@ jobs:
- name: Validate branch and tag
run: exit 0
build-test-api:
test-api:
defaults:
run:
working-directory: api
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Install Node
uses: actions/setup-node@v4
with:
node-version-file: "api/.nvmrc"
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 8
run_install: false
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
name: Setup pnpm cache
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('api/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: PNPM Install
run: pnpm install
- name: Lint
run: pnpm run lint
- name: Test
run: pnpm run coverage
build-api:
name: Build and Test API
runs-on: ubuntu-latest
defaults:
run:
working-directory: api
outputs:
API_VERSION: ${{ steps.vars.outputs.API_VERSION }}
API_MD5: ${{ steps.set-hashes.outputs.API_MD5 }}
API_SHA256: ${{ steps.set-hashes.outputs.API_SHA256 }}
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Build with Buildx
uses: docker/setup-buildx-action@v3
with:
install: true
platforms: linux/amd64
- name: Build Builder
uses: docker/build-push-action@v6
with:
context: ./api
push: false
tags: builder:latest
cache-from: type=gha,ref=builder:latest
cache-to: type=gha,mode=max,ref=builder:latest
load: true
- name: Lint inside of the docker container
continue-on-error: false
run: |
docker run --rm builder npm run lint
- name: Test inside of the docker container
- name: Install Node
uses: actions/setup-node@v4
with:
node-version-file: "api/.nvmrc"
- uses: pnpm/action-setup@v4
name: Install pnpm
with:
version: 8
run_install: false
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
git fetch --depth=2 origin main
if git diff --name-only --relative=api origin/main HEAD | grep -q '.'; then
docker run --rm builder npm run coverage
else
echo "No changes in /api folder, skipping coverage."
fi
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
name: Setup pnpm cache
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('api/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Cache APT Packages
uses: awalsh128/cache-apt-pkgs-action@v1.4.3
with:
packages: bash procps python3 libvirt-dev jq zstd git build-essential
version: 1.0
- name: PNPM Install
run: pnpm install
- name: Lint
run: pnpm run lint
- name: Build
run: pnpm run build
- name: Get Git Short Sha and API version
id: vars
run: |
GIT_SHA=$(git rev-parse --short HEAD)
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
PACKAGE_LOCK_VERSION=$(jq -r '.version' package-lock.json)
echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT
echo "IS_TAGGED=$IS_TAGGED" >> $GITHUB_OUTPUT
echo "PACKAGE_LOCK_VERSION=$PACKAGE_LOCK_VERSION" >> $GITHUB_OUTPUT
echo "API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")" >> $GITHUB_OUTPUT
- name: Build inside of the docker container
id: build-pack-binary
run: |
docker run --rm -v ${{ github.workspace }}/api/deploy/release:/app/deploy/release -e API_VERSION=${{ steps.vars.outputs.API_VERSION }} builder npm run build-and-pack
- name: Set Hashes
id: set-hashes
run: |
echo "API_MD5=$(md5sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')" >> $GITHUB_OUTPUT
echo "API_SHA256=$(sha256sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')" >> $GITHUB_OUTPUT
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
export API_VERSION
- name: Build
run: pnpm run build-and-pack
- name: Upload tgz to Github artifacts
uses: actions/upload-artifact@v4
@@ -187,7 +227,7 @@ jobs:
path: web/.nuxt/nuxt-custom-elements/dist/unraid-components
build-plugin:
needs: [build-test-api, build-web, build-unraid-ui-webcomponents]
needs: [build-api, build-web, build-unraid-ui-webcomponents]
defaults:
run:
working-directory: plugin
@@ -201,38 +241,42 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Build with Buildx
uses: docker/setup-buildx-action@v3
- name: Install node
uses: actions/setup-node@v4
with:
install: true
platforms: linux/amd64
- name: Build Builder
uses: docker/build-push-action@v6
with:
context: ./plugin
push: false
tags: plugin-builder:latest
cache-from: type=gha,ref=plugin-builder:latest
cache-to: type=gha,mode=max,ref=plugin-builder:latest
load: true
cache: "npm"
cache-dependency-path: |
plugin/package-lock.json
node-version-file: ".nvmrc"
- name: Install dependencies
run: npm ci
- name: Download Unraid Web Components
uses: actions/download-artifact@v4
with:
pattern: unraid-wc-*
path: ./plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components
merge-multiple: true
- name: Build Plugin
- name: Download Unraid API
uses: actions/download-artifact@v4
with:
name: unraid-api
path: /tmp/unraid-api/
- name: Extract Unraid API and Build Plugin
run: |
echo "API_VERSION=${{needs.build-test-api.outputs.API_VERSION}}" > .env
echo "API_SHA256=${{needs.build-test-api.outputs.API_SHA256}}" >> .env
echo "PR=${{ github.event.pull_request.number }}" >> .env
npm run start
tar -xzf /tmp/unraid-api/unraid-api.tgz -C ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api
cd ${{ github.workspace }}/plugin
if [ -n "${{ github.event.pull_request.number }}" ]; then
export TAG=PR${{ github.event.pull_request.number }}
fi
npm run build
- name: Upload binary txz and plg to Github artifacts
uses: actions/upload-artifact@v4
with:
name: connect-files
path: |
plugin/deploy/release/plugins/*.plg
plugin/deploy/release/plugins/
plugin/deploy/release/archive/*.txz
retention-days: 5
if-no-files-found: error
@@ -241,7 +285,7 @@ jobs:
if: |
github.event_name == 'pull_request'
runs-on: ubuntu-latest
needs: [build-plugin]
needs: [test-api, build-plugin]
steps:
- name: Checkout repo
uses: actions/checkout@v4
@@ -249,12 +293,6 @@ jobs:
- name: Make PR Release Folder
run: mkdir pr-release/
- name: Download unraid-api binary tgz
uses: actions/download-artifact@v4
with:
name: unraid-api
path: pr-release
- name: Download plugin binary tgz
uses: actions/download-artifact@v4
with:
@@ -263,7 +301,7 @@ jobs:
- name: Copy other release files to pr-release
run: |
cp archive/*.txz pr-release/
cp plugins/dynamix.unraid.net.pr.plg pr-release/dynamix.unraid.net.plg
cp plugins/pr/dynamix.unraid.net.plg pr-release/dynamix.unraid.net.plg
- name: Upload to Cloudflare
uses: jakejarvis/s3-sync-action@v0.5.1
@@ -293,7 +331,7 @@ jobs:
# Only release if this is a push to the main branch
if: startsWith(github.ref, 'refs/heads/main')
runs-on: ubuntu-latest
needs: [build-plugin]
needs: [test-api, build-plugin]
steps:
- name: Checkout repo
@@ -302,12 +340,6 @@ jobs:
- name: Make Staging Release Folder
run: mkdir staging-release/
- name: Download unraid-api binary tgz
uses: actions/download-artifact@v4
with:
name: unraid-api
path: staging-release
- name: Download plugin binary tgz
uses: actions/download-artifact@v4
with:
@@ -316,7 +348,7 @@ jobs:
- name: Copy Files for Staging Release
run: |
cp archive/*.txz staging-release/
cp plugins/dynamix.unraid.net.staging.plg staging-release/dynamix.unraid.net.plg
cp plugins/staging/dynamix.unraid.net.plg staging-release/dynamix.unraid.net.plg
ls -al staging-release
- name: Upload Staging Plugin to Cloudflare Bucket
@@ -335,17 +367,11 @@ jobs:
if: |
startsWith(github.ref, 'refs/tags/v')
runs-on: ubuntu-latest
needs: [build-plugin]
needs: [test-api, build-plugin]
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Download unraid-api binary tgz
uses: actions/download-artifact@v4
with:
name: unraid-api
- name: Download plugin binary tgz
uses: actions/download-artifact@v4
with:
@@ -355,7 +381,7 @@ jobs:
run: |
mkdir -p release/
mv unraid-api-*.tgz release/
mv plugins/dynamix.unraid.net.plg release/
mv plugins/production/dynamix.unraid.net.plg release/
mv archive/* release/
- name: Create Github release
+3
View File
@@ -92,3 +92,6 @@ deploy/*
!.env.example
fb_keepalive
# pnpm store
.pnpm-store
+5 -3
View File
@@ -1,11 +1,13 @@
VERSION="THIS_WILL_BE_REPLACED_WHEN_BUILT"
PATHS_UNRAID_DATA=./dev/data # Where we store plugin data (e.g. permissions.json)
PATHS_STATES=./dev/states # Where .ini files live (e.g. vars.ini)
PATHS_AUTH_SESSIONS=./dev/sessions # Where user sessions live
PATHS_AUTH_KEY=./dev/keys # Auth key directory
PATHS_DYNAMIX_BASE=./dev/dynamix # Dynamix's data directory
PATHS_DYNAMIX_CONFIG_DEFAULT=./dev/dynamix/default.cfg # Dynamix's default config file, which ships with unraid
PATHS_DYNAMIX_CONFIG=./dev/dynamix/dynamix.cfg # Dynamix's config file
PATHS_MY_SERVERS_CONFIG=./dev/Unraid.net/myservers.cfg # My servers config file
PATHS_MY_SERVERS_FB=./dev/Unraid.net/fb_keepalive # My servers flashbackup timekeeper file
PATHS_KEYFILE_BASE=./dev/Unraid.net # Keyfile location
PATHS_MACHINE_ID=./dev/data/machine-id
PORT=5000
NODE_ENV=test
NODE_ENV="test"
+2
View File
@@ -80,3 +80,5 @@ deploy/*
# IDE Settings Files
.idea
!**/*.login.*
+2
View File
@@ -0,0 +1,2 @@
node-linker=hoisted
shamefully-hoist=true
+8 -5
View File
@@ -19,15 +19,18 @@ WORKDIR /app
# Set app env
ENV NODE_ENV=development
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
COPY tsconfig.json .eslintrc.ts .prettierrc.cjs .npmrc .env.production .env.staging ./
# Install pnpm
RUN corepack enable && corepack prepare pnpm@8.15.4 --activate && npm i -g npm@latest
COPY package.json package-lock.json ./
COPY tsconfig.json .eslintrc.ts .prettierrc.cjs .npmrc .env.production .env.staging package.json pnpm-lock.yaml .npmrc ./
# Install deps
RUN npm i
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install
EXPOSE 4000
EXPOSE 3001
###########################################################
# Builder Image
@@ -39,4 +42,4 @@ ENV NODE_ENV=production
COPY . .
CMD ["npm", "run", "build-and-pack"]
CMD ["pnpm", "run", "build-and-pack"]
+17 -36
View File
@@ -1,25 +1,11 @@
x-volumes: &volumes
x-common: &common
volumes:
- ./dev:/app/dev
- ./src:/app/src
- ./package.json:/app/package.json
- ./package-lock.json:/app/package-lock.json
- ./tsconfig.json:/app/tsconfig.json
- ./vite.config.ts:/app/vite.config.ts
- ./dist/:/app/dist/
- ./deploy/:/app/deploy/
- ./README.md:/app/README.md
- ./scripts/:/app/scripts/
- ../.git/:/app/.git/
- ./.env.production:/app/.env.production
- ./.env.staging:/app/.env.staging
- ./.env.test:/app/.env.test
- ./.env.development:/app/.env.development
- ./codegen.ts:/app/codegen.ts
- ./fix-array-type.cjs:/app/fix-array-type.cjs
- /var/run/docker.sock:/var/run/docker.sock
- ./unraid-api.js:/app/unraid-api.js
- ./ecosystem.config.json:/app/ecosystem.config.json
- ./:/app
- pnpm-store:/pnpm/store
environment:
- IS_DOCKER=true
- GIT_SHA=${GIT_SHA:-unknown}
- IS_TAGGED=${IS_TAGGED:-false}
services:
@@ -31,14 +17,10 @@ services:
context: .
target: development
dockerfile: Dockerfile
<<: *volumes
<<: *common
stdin_open: true
tty: true
entrypoint: /bin/bash
environment:
- IS_DOCKER=true
- GIT_SHA=${GIT_SHA:?err}
- IS_TAGGED=${IS_TAGGED}
profiles:
- builder
@@ -50,24 +32,23 @@ services:
context: .
target: development
dockerfile: Dockerfile
<<: *volumes
<<: *common
command: npm run start:dev
environment:
- IS_DOCKER=true
- GIT_SHA=${GIT_SHA:?err}
- IS_TAGGED=${IS_TAGGED}
profiles:
- builder
builder:
image: unraid-api:builder
environment:
- GIT_SHA=${GIT_SHA:?err}
- IS_TAGGED=${IS_TAGGED}
build:
context: .
target: builder
dockerfile: Dockerfile
<<: *volumes
<<: *common
profiles:
- builder
- builder
volumes:
pnpm-store:
name: "pnpm-store"
pnpm-cache:
name: "pnpm-cache"
-18683
View File
File diff suppressed because it is too large Load Diff
+30 -24
View File
@@ -3,41 +3,45 @@
"version": "4.0.1",
"main": "src/cli/index.ts",
"type": "module",
"corepack": {
"enabled": true
},
"repository": "git@github.com:unraid/api.git",
"author": "Lime Technology, Inc. <unraid.net>",
"license": "UNLICENSED",
"engines": {
"pnpm": ">=8.15.4"
},
"scripts": {
"// Main application commands": "",
"start": "node dist/main.js",
"build:docker": "./scripts/dc.sh run --rm builder",
"dev": "vite",
"command": "pnpm run build && clear && ./dist/cli.js",
"// Build commands": "",
"build": "vite build --mode=production",
"postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js",
"build-and-pack": "./scripts/build.mjs",
"build:docker": "./scripts/dc.sh run --rm builder",
"build-and-pack": "tsx ./scripts/build.ts",
"// Code generation commands": "",
"codegen": "MOTHERSHIP_GRAPHQL_LINK='https://staging.mothership.unraid.net/ws' graphql-codegen --config codegen.ts -r dotenv/config './.env.staging'",
"codegen:watch": "DOTENV_CONFIG_PATH='./.env.staging' graphql-codegen --config codegen.ts --watch -r dotenv/config",
"codegen:local": "NODE_TLS_REJECT_UNAUTHORIZED=0 MOTHERSHIP_GRAPHQL_LINK='https://mothership.localhost/ws' graphql-codegen --config codegen.ts --watch",
"// Development and quality tools": "",
"tsc": "tsc --noEmit",
"lint": "eslint --config .eslintrc.ts src/",
"lint:fix": "eslint --fix --config .eslintrc.ts src/",
"test:watch": "vitest --pool=forks",
"test": "vitest run --pool=forks",
"coverage": "vitest run --pool=forks --coverage",
"release": "standard-version",
"dev": "vite",
"command": "npm run build && clear && ./dist/cli.js",
"// Testing commands": "",
"test": "NODE_ENV=test vitest run",
"test:watch": "NODE_ENV=test vitest --ui",
"coverage": "NODE_ENV=test vitest run --coverage",
"// Container management commands": "",
"container:build": "./scripts/dc.sh build dev",
"container:start": "./scripts/dc.sh run --rm --service-ports dev",
"container:start": "pnpm run container:stop && ./scripts/dc.sh run --rm --service-ports dev",
"container:stop": "./scripts/dc.sh stop dev",
"container:test": "./scripts/dc.sh run --rm builder npm run test",
"container:test": "./scripts/dc.sh run --rm builder pnpm run test",
"container:enter": "./scripts/dc.sh exec dev /bin/bash"
},
"files": [
".env.staging",
".env.production",
"ecosystem.config.json",
"README.md",
"src",
"node_modules/"
],
"bin": {
"unraid-api": "dist/cli.js"
},
@@ -159,8 +163,8 @@
"@types/uuid": "^10.0.0",
"@types/ws": "^8.5.13",
"@types/wtfnode": "^0.7.3",
"@vitest/coverage-v8": "^2.1.8",
"@vitest/ui": "^2.1.4",
"@vitest/coverage-v8": "^3.0.5",
"@vitest/ui": "^3.0.5",
"cz-conventional-changelog": "3.3.0",
"eslint": "^9.14.0",
"eslint-plugin-no-relative-import-paths": "^1.6.1",
@@ -170,14 +174,15 @@
"nodemon": "^3.1.7",
"rollup-plugin-node-externals": "^7.1.3",
"standard-version": "^9.5.0",
"tsx": "^4.19.2",
"typescript": "^5.6.3",
"typescript-eslint": "^8.13.0",
"unplugin-swc": "^1.5.1",
"vite": "^5.4.10",
"vite": "^5.4.14",
"vite-plugin-node": "^4.0.0",
"vite-tsconfig-paths": "^5.1.0",
"vitest": "^2.1.8",
"zx": "^8.2.0"
"vitest": "^3.0.5",
"zx": "^8.3.2"
},
"optionalDependencies": {
"@vmngr/libvirt": "github:unraid/libvirt"
@@ -186,5 +191,6 @@
"eslint": {
"jiti": "2"
}
}
}
},
"packageManager": "pnpm@8.15.4+sha512.0bd3a9be9eb0e9a692676deec00a303ba218ba279d99241475616b398dbaeedd11146f92c2843458f557b1d127e09d4c171e105bdcd6b61002b39685a8016b9e"
}
+11046
View File
File diff suppressed because it is too large Load Diff
-105
View File
@@ -1,105 +0,0 @@
#!/usr/bin/env zx
import { cp, mkdir, stat, writeFile } from 'fs/promises';
import { exit } from 'process';
import { pathExists } from 'fs-extra';
import { $, cd } from 'zx';
import { getDeploymentVersion } from './get-deployment-version.mjs';
try {
// Enable colours in output
process.env.FORCE_COLOR = '1';
// Ensure we have the correct working directory
process.env.WORKDIR ??= process.env.PWD;
cd(process.env.WORKDIR);
await $`rm -rf ./deploy/release/*`;
await $`rm -rf ./deploy/pre-pack/*`;
// Create deployment directories - ignore if they already exist
await mkdir('./deploy/release', { recursive: true });
await mkdir('./deploy/pre-pack', { recursive: true });
// Build Generated Types
await $`npm run codegen`;
await $`npm run build`;
// Copy app files to plugin directory
await cp('./dist', './deploy/pre-pack/dist', { recursive: true });
// Copy environment to deployment directory
const files = [
'.env.production',
'.env.staging',
'tsconfig.json',
'codegen.ts',
'ecosystem.config.json',
'vite.config.ts',
];
for (const file of files) {
await cp(`./${file}`, `./deploy/pre-pack/${file}`);
}
// Get package details
const { name, version, devDependencies, ...rest } = await import('../package.json', {
assert: { type: 'json' },
}).then((pkg) => pkg.default);
const deploymentVersion = getDeploymentVersion(process.env, version);
// Create deployment package.json
await writeFile(
'./deploy/pre-pack/package.json',
JSON.stringify(
{
name,
version: deploymentVersion,
...rest,
},
null,
2
)
);
// # Create final tgz
await cp('./README.md', './deploy/pre-pack/README.md');
// Install production dependencies
console.log('Installing dependencies...');
$.verbose = true;
await cd('./deploy/pre-pack');
await $`npm install --omit=dev`;
// Ensure that we don't have any dev dependencies left
console.log('Installed dependencies:');
await $`npm ls --depth=0`;
console.log('Dependencies installed, packing...');
// Now we'll pack everything in the pre-pack directory to the release directory
const tarballPath = `../release/unraid-api-${deploymentVersion}.tgz`;
await $`tar -czf ${tarballPath} .`;
// Ensure the tarball exists
if (!(await pathExists(tarballPath))) {
console.error(`Failed to create tarball at ${tarballPath}`);
process.exit(1);
}
const packageSize = Math.round((await stat(tarballPath)).size / 1024 / 1024);
console.log(`Package created at: ${tarballPath} with size ${packageSize} MB`);
} catch (error) {
// Error with a command
if (Object.keys(error).includes('stderr')) {
console.log(`Failed building package. Exit code: ${error.exitCode}`);
console.log(`Error: ${error.stderr}`);
} else {
// Normal js error
console.log('Failed building package.');
console.log(`Error: ${error.message}`);
}
exit(error.exitCode);
}
+67
View File
@@ -0,0 +1,67 @@
#!/usr/bin/env zx
import { mkdir, readFile, rm, writeFile } from 'fs/promises';
import { exit } from 'process';
import { $, cd } from 'zx';
import { getDeploymentVersion } from './get-deployment-version.js';

/**
 * Build-and-pack script: compiles the API, stamps the deployment version
 * into package.json, installs production deps, and produces
 * ./deploy/release/unraid-api.tgz. Exits non-zero on any failure.
 */
try {
    // Clean any previous build output, then recreate the deploy tree.
    // rm() rejects when ./deploy does not exist; that's fine on a fresh checkout.
    await rm('./deploy', { recursive: true }).catch(() => {});
    await mkdir('./deploy/release', { recursive: true });
    await mkdir('./deploy/pack', { recursive: true });
    // Generate GraphQL types, then compile the app.
    await $`pnpm run codegen`;
    await $`pnpm run build`;
    // Read package.json and stamp it with the computed deployment version
    // (plain version on tagged commits, version+sha otherwise).
    const packageJson = await readFile('./package.json', 'utf-8');
    const parsedPackageJson = JSON.parse(packageJson);
    const deploymentVersion = await getDeploymentVersion(process.env, parsedPackageJson.version);
    parsedPackageJson.version = deploymentVersion;
    await writeFile('./deploy/pack/package.json', JSON.stringify(parsedPackageJson, null, 4));
    // Copy build artifacts and runtime config into the pack directory.
    await $`cp -r dist README.md .env.* ecosystem.config.json ./deploy/pack/`;
    // Install only production dependencies inside the pack directory.
    cd('./deploy/pack');
    console.log('Installing production dependencies...');
    $.verbose = true;
    await $`pnpm install --prod`;
    // Make the CLI and server entry points executable before packaging.
    await $`chmod +x ./dist/cli.js`;
    await $`chmod +x ./dist/main.js`;
    // Create the final release tarball.
    await $`tar -czf ../release/unraid-api.tgz ./`;
    cd('..');
} catch (error) {
    // zx command failures carry stderr/exitCode; plain JS errors do not.
    // Guard with a type check so a thrown non-object can't crash the handler.
    if (error && typeof error === 'object' && 'stderr' in error) {
        console.log(`Failed building package. Exit code: ${(error as any).exitCode}`);
        console.log(`Error: ${(error as any).stderr}`);
    } else {
        console.log('Failed building package.');
        console.log(`Error: ${(error as any)?.message ?? error}`);
    }
    // BUG FIX: for non-command errors, exitCode is undefined and
    // process.exit(undefined) exits with status 0, masking the failure
    // from CI. Default to 1 so any failure is reported as such.
    exit((error as any)?.exitCode ?? 1);
}
@@ -1,15 +1,16 @@
import { execSync } from 'child_process';
import { execa } from 'execa';
const runCommand = (command) => {
const runCommand = async (command: string, args: string[]) => {
try {
return execSync(command, { stdio: 'pipe' }).toString().trim();
const { stdout } = await execa(command, args);
return stdout.trim();
} catch (error) {
console.log('Failed to get value from tag command: ', command, error.message);
return;
console.log('Failed to execute command:', command, args.join(' '), error.message);
return undefined;
}
};
export const getDeploymentVersion = (env = process.env, packageVersion) => {
export const getDeploymentVersion = async (env = process.env, packageVersion: string) => {
if (env.API_VERSION) {
console.log(`Using env var for version: ${env.API_VERSION}`);
return env.API_VERSION;
@@ -17,9 +18,11 @@ export const getDeploymentVersion = (env = process.env, packageVersion) => {
console.log(`Using env vars for git tags: ${env.GIT_SHA} ${env.IS_TAGGED}`);
return env.IS_TAGGED ? packageVersion : `${packageVersion}+${env.GIT_SHA}`;
} else {
const gitShortSHA = runCommand('git rev-parse --short HEAD');
const isCommitTagged = runCommand('git describe --tags --abbrev=0 --exact-match') !== undefined;
const gitShortSHA = await runCommand('git', ['rev-parse', '--short', 'HEAD']);
const isCommitTagged = await runCommand('git', ['describe', '--tags', '--abbrev=0', '--exact-match']) !== undefined;
console.log('gitShortSHA', gitShortSHA, 'isCommitTagged', isCommitTagged);
if (!gitShortSHA) {
console.error('Failed to get git short SHA');
process.exit(1);
@@ -1,18 +1,13 @@
import { getAllowedOrigins } from '@app/common/allowed-origins';
import { store } from '@app/store/index';
import { loadConfigFile } from '@app/store/modules/config';
import { loadStateFiles } from '@app/store/modules/emhttp';
import 'reflect-metadata';
import { expect, test } from 'vitest';
// Preloading imports for faster tests
import '@app/common/allowed-origins';
import '@app/store/modules/emhttp';
import '@app/store';
test('Returns allowed origins', async () => {
const { store } = await import('@app/store');
const { loadStateFiles } = await import('@app/store/modules/emhttp');
const { getAllowedOrigins } = await import('@app/common/allowed-origins');
const { loadConfigFile } = await import('@app/store/modules/config');
// Load state files into store
await store.dispatch(loadStateFiles());
await store.dispatch(loadConfigFile());
@@ -1,14 +1,15 @@
import { expect, test, vi } from 'vitest';
import { getArrayData } from '@app/core/modules/array/get-array-data';
import { store } from '@app/store';
import { loadConfigFile } from '@app/store/modules/config';
import { loadStateFiles } from '@app/store/modules/emhttp';
vi.mock('@app/core/pubsub', () => ({
pubsub: { publish: vi.fn() },
}));
test('Creates an array event', async () => {
const { getArrayData } = await import('@app/core/modules/array/get-array-data');
const { store } = await import('@app/store');
const { loadStateFiles } = await import('@app/store/modules/emhttp');
const { loadConfigFile } = await import('@app/store/modules/config');
// Load state files into store
await store.dispatch(loadStateFiles());
@@ -1,15 +0,0 @@
import { expect, test } from 'vitest';
import { setupNotificationWatch } from '@app/core/modules/notifications/setup-notification-watch';
import { sleep } from '@app/core/utils/misc/sleep';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file';
import { store } from '@app/store/index';
test('loads notifications properly', async () => {
await store.dispatch(loadDynamixConfigFile()).unwrap();
const watch = await setupNotificationWatch();
expect(watch).not.toBeNull();
await sleep(400);
expect(store.getState().notifications.notifications).toMatchSnapshot();
await watch?.close();
});
+1 -1
View File
@@ -1,4 +1,4 @@
import uniq from 'lodash/uniq';
import { uniq } from 'lodash-es';
import type { RootState } from '@app/store';
import { logger } from '@app/core';
+1 -1
View File
@@ -80,5 +80,5 @@ export const KEYSERVER_VALIDATION_ENDPOINT = 'https://keys.lime-technology.com/v
/** Set the max retries for the GraphQL Client */
export const MAX_RETRIES_FOR_LINEAR_BACKOFF = 100;
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', '.bin', 'pm2');
export const PM2_PATH = join(import.meta.dirname, '../../', 'node_modules', 'pm2', 'bin', 'pm2');
export const ECOSYSTEM_PATH = join(import.meta.dirname, '../../', 'ecosystem.config.json');
+1 -1
View File
@@ -1,5 +1,5 @@
import { GraphQLError } from 'graphql';
import sum from 'lodash/sum';
import { sum } from 'lodash-es';
import type { ArrayCapacity, ArrayType } from '@app/graphql/generated/api/types';
import { getServerIdentifier } from '@app/core/utils/server-identifier';
@@ -1,37 +0,0 @@
import { join } from 'node:path';
import type { FSWatcher } from 'chokidar';
import { watch } from 'chokidar';
import { getters, store } from '@app/store/index';
import { clearNotification, loadNotification } from '@app/store/modules/notifications';
import { FileLoadStatus } from '@app/store/types';
const handleNotificationAdd = (path: string) => {
store.dispatch(loadNotification({ path }));
};
const handleNotificationRemove = (path: string) => {
store.dispatch(clearNotification({ path }));
};
let watcher: FSWatcher | null = null;
export const setupNotificationWatch = async (): Promise<FSWatcher | null> => {
const { notify, status } = getters.dynamix();
if (status === FileLoadStatus.LOADED && notify?.path) {
if (watcher) {
await watcher.close();
}
watcher = watch(join(notify.path, 'unread'), {})
.on('add', (path) => {
handleNotificationAdd(path);
})
.on('unlink', (path) => {
handleNotificationRemove(path);
});
return watcher;
}
return null;
};
@@ -1,5 +1,4 @@
import { isEqual } from 'lodash-es';
import merge from 'lodash/merge';
import { isEqual, merge } from 'lodash-es';
import { getAllowedOrigins } from '@app/common/allowed-origins';
import { initialState } from '@app/store/modules/config';
+1 -1
View File
@@ -1,5 +1,5 @@
import type { DiskShare, Share, UserShare } from '@app/core/types/states/share';
import { type ArrayDisk } from '@app/graphql/generated/api/types';
import type { ArrayDisk } from '@app/graphql/generated/api/types';
import { getters } from '@app/store';
const processors = {
+6 -1
View File
@@ -13,7 +13,12 @@ import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-
* Therefore it is highly recommended to use the babel or swc plugin for production.
* Learn more about it here: https://the-guild.dev/graphql/codegen/plugins/presets/preset-client#reducing-bundle-size
*/
const documents = {
type Documents = {
"\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": typeof types.sendRemoteGraphQLResponseDocument,
"\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": typeof types.RemoteGraphQLEventFragmentFragmentDoc,
"\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": typeof types.eventsDocument,
};
const documents: Documents = {
"\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": types.sendRemoteGraphQLResponseDocument,
"\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": types.RemoteGraphQLEventFragmentFragmentDoc,
"\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": types.eventsDocument,
@@ -8,7 +8,6 @@ import { enableArrayEventListener } from '@app/store/listeners/array-event-liste
import { enableConfigFileListener } from '@app/store/listeners/config-listener';
import { enableDynamicRemoteAccessListener } from '@app/store/listeners/dynamic-remote-access-listener';
import { enableMothershipJobsListener } from '@app/store/listeners/mothership-subscription-listener';
import { enableNotificationPathListener } from '@app/store/listeners/notification-path-listener';
import { enableServerStateListener } from '@app/store/listeners/server-state-listener';
import { enableUpnpListener } from '@app/store/listeners/upnp-listener';
import { enableVersionListener } from '@app/store/listeners/version-listener';
@@ -35,5 +34,4 @@ export const startMiddlewareListeners = () => {
enableArrayEventListener();
enableWanAccessChangeListener();
enableServerStateListener();
enableNotificationPathListener();
};
@@ -1,4 +1,4 @@
import isEqual from 'lodash/isEqual';
import { isEqual } from 'lodash-es';
import { minigraphLogger } from '@app/core/log';
import { MinigraphStatus } from '@app/graphql/generated/api/types';
@@ -1,4 +1,4 @@
import isEqual from 'lodash/isEqual';
import { isEqual } from 'lodash-es';
import { mothershipLogger } from '@app/core/log';
import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub';
+1 -2
View File
@@ -4,8 +4,7 @@ import { access } from 'fs/promises';
import type { PayloadAction } from '@reduxjs/toolkit';
import { createAsyncThunk, createSlice, isAnyOf } from '@reduxjs/toolkit';
import { isEqual } from 'lodash-es';
import merge from 'lodash/merge';
import { isEqual, merge } from 'lodash-es';
import type { Owner } from '@app/graphql/generated/api/types';
import { logger } from '@app/core/log';
+1 -1
View File
@@ -1,6 +1,6 @@
import type { PayloadAction } from '@reduxjs/toolkit';
import { createSlice } from '@reduxjs/toolkit';
import merge from 'lodash/merge';
import { merge } from 'lodash-es';
import { type DynamixConfig } from '@app/core/types/ini';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file';
+28 -36
View File
@@ -2,7 +2,7 @@ import { join } from 'path';
import type { PayloadAction } from '@reduxjs/toolkit';
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
import merge from 'lodash/merge';
import { merge } from 'lodash-es';
import type { RootState } from '@app/store';
import type { StateFileToIniParserMap } from '@app/store/types';
@@ -17,15 +17,6 @@ import { type Users } from '@app/core/types/states/user';
import { type Var } from '@app/core/types/states/var';
import { parseConfig } from '@app/core/utils/misc/parse-config';
import { type ArrayDisk } from '@app/graphql/generated/api/types';
import { parse as parseDevices } from '@app/store/state-parsers/devices';
import { parse as parseNetwork } from '@app/store/state-parsers/network';
import { parse as parseNfsShares } from '@app/store/state-parsers/nfs';
import { parse as parseNginx } from '@app/store/state-parsers/nginx';
import { parse as parseShares } from '@app/store/state-parsers/shares';
import { parse as parseSlots } from '@app/store/state-parsers/slots';
import { parse as parseSmbShares } from '@app/store/state-parsers/smb';
import { parse as parseUsers } from '@app/store/state-parsers/users';
import { parse as parseVar } from '@app/store/state-parsers/var';
import { FileLoadStatus, StateFileKey } from '@app/store/types';
export type SliceState = {
@@ -54,26 +45,28 @@ const initialState: SliceState = {
nfsShares: [],
};
export const parsers: StateFileToIniParserMap = {
[StateFileKey.var]: parseVar,
[StateFileKey.devs]: parseDevices,
[StateFileKey.network]: parseNetwork,
[StateFileKey.nginx]: parseNginx,
[StateFileKey.shares]: parseShares,
[StateFileKey.disks]: parseSlots,
[StateFileKey.users]: parseUsers,
[StateFileKey.sec]: parseSmbShares,
[StateFileKey.sec_nfs]: parseNfsShares,
export const parsers: {
[K in StateFileKey]: () => Promise<StateFileToIniParserMap[K]>;
} = {
[StateFileKey.var]: async () => (await import('@app/store/state-parsers/var')).parse,
[StateFileKey.devs]: async () => (await import('@app/store/state-parsers/devices')).parse,
[StateFileKey.network]: async () => (await import('@app/store/state-parsers/network')).parse,
[StateFileKey.nginx]: async () => (await import('@app/store/state-parsers/nginx')).parse,
[StateFileKey.shares]: async () => (await import('@app/store/state-parsers/shares')).parse,
[StateFileKey.disks]: async () => (await import('@app/store/state-parsers/slots')).parse,
[StateFileKey.users]: async () => (await import('@app/store/state-parsers/users')).parse,
[StateFileKey.sec]: async () => (await import('@app/store/state-parsers/smb')).parse,
[StateFileKey.sec_nfs]: async () => (await import('@app/store/state-parsers/nfs')).parse,
};
const getParserFunction = (parser: StateFileKey): StateFileToIniParserMap[StateFileKey] =>
parsers[parser];
const getParserFunction = async (parser: StateFileKey): Promise<StateFileToIniParserMap[StateFileKey]> =>
await parsers[parser]();
const parseState = <T extends StateFileKey, Q = ReturnType<StateFileToIniParserMap[T]> | null>(
const parseState = async <T extends StateFileKey, Q = ReturnType<StateFileToIniParserMap[T]> | null>(
statesDirectory: string,
parser: T,
defaultValue?: NonNullable<Q>
): Q => {
): Promise<Q> => {
const filePath = join(statesDirectory, `${parser}.ini`);
try {
@@ -82,8 +75,7 @@ const parseState = <T extends StateFileKey, Q = ReturnType<StateFileToIniParserM
filePath,
type: 'ini',
});
const parserFn = getParserFunction(parser);
// @TODO Not sure why this type doesn't work
const parserFn = await getParserFunction(parser);
return parserFn(config as unknown as any) as Q;
} catch (error: unknown) {
emhttpLogger.error(
@@ -106,7 +98,7 @@ export const loadSingleStateFile = createAsyncThunk<any, StateFileKey, { state:
async (stateFileKey, { getState }) => {
const path = getState().paths.states;
const config = parseState(path, stateFileKey);
const config = await parseState(path, stateFileKey);
if (config) {
switch (stateFileKey) {
case StateFileKey.var:
@@ -145,15 +137,15 @@ export const loadStateFiles = createAsyncThunk<
>('emhttp/load-state-file', async (_, { getState }) => {
const path = getState().paths.states;
const state: Omit<SliceState, 'mode' | 'status'> = {
var: parseState(path, StateFileKey.var, {} as Var),
devices: parseState(path, StateFileKey.devs, []),
networks: parseState(path, StateFileKey.network, []),
nginx: parseState(path, StateFileKey.nginx, {} as Nginx),
shares: parseState(path, StateFileKey.shares, []),
disks: parseState(path, StateFileKey.disks, []),
users: parseState(path, StateFileKey.users, []),
smbShares: parseState(path, StateFileKey.sec, []),
nfsShares: parseState(path, StateFileKey.sec_nfs, []),
var: await parseState(path, StateFileKey.var, {} as Var),
devices: await parseState(path, StateFileKey.devs, []),
networks: await parseState(path, StateFileKey.network, []),
nginx: await parseState(path, StateFileKey.nginx, {} as Nginx),
shares: await parseState(path, StateFileKey.shares, []),
disks: await parseState(path, StateFileKey.disks, []),
users: await parseState(path, StateFileKey.users, []),
smbShares: await parseState(path, StateFileKey.sec, []),
nfsShares: await parseState(path, StateFileKey.sec_nfs, []),
};
return state;
+1 -1
View File
@@ -2,7 +2,7 @@ import { format } from 'util';
import type { PayloadAction } from '@reduxjs/toolkit';
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
import merge from 'lodash/merge';
import { merge } from 'lodash-es';
import type { RootState } from '@app/store';
import { logger } from '@app/core/log';
+1 -1
View File
@@ -1,4 +1,4 @@
import isEqual from 'lodash/isEqual';
import { isEqual } from 'lodash-es';
import type { StoreSubscriptionHandler } from '@app/store/types';
import { logger } from '@app/core/log';
@@ -0,0 +1,578 @@
<?php
// Included in login.php
// Only start a session to check if they have a cookie that looks like our session
$server_name = strtok($_SERVER['HTTP_HOST'],":");
if (!empty($_COOKIE['unraid_'.md5($server_name)])) {
// Start the session so we can check if $_SESSION has data
if (session_status()==PHP_SESSION_NONE) session_start();
// Check if the user is already logged in
if ($_SESSION && !empty($_SESSION['unraid_user'])) {
// Redirect the user to the start page
header("Location: /".$start_page);
exit;
}
}
// Read the entire contents of $file under an exclusive lock.
// Returns "" when the file is missing, empty, or cannot be opened/locked.
function readFromFile($file): string {
    $text = "";
    if (file_exists($file) && filesize($file) > 0) {
        $fp = fopen($file, "r");
        // fopen() can still fail (permissions, race with deletion) even after
        // file_exists() succeeded — the original called flock(false) here.
        if ($fp !== false) {
            if (flock($fp, LOCK_EX)) {
                // NOTE(review): LOCK_EX for a read; LOCK_SH would allow concurrent
                // readers — confirm exclusive read locking is intentional.
                $text = fread($fp, filesize($file));
                flock($fp, LOCK_UN);
            }
            // close even when the lock could not be taken (original leaked $fp)
            fclose($fp);
        }
    }
    return $text;
}
// Append $text to $file under an exclusive lock, flushing before unlock.
// Silently does nothing if the file cannot be opened.
function appendToFile($file, $text): void {
    $fp = fopen($file, "a");
    // fopen() returns false on bad path/permissions; the original passed
    // that false straight to flock(), emitting a warning.
    if ($fp !== false) {
        if (flock($fp, LOCK_EX)) {
            fwrite($fp, $text);
            fflush($fp); // make sure the bytes hit disk before releasing the lock
            flock($fp, LOCK_UN);
        }
        // close even when the lock could not be taken (original leaked $fp)
        fclose($fp);
    }
}
// Replace the contents of $file with $text under an exclusive lock,
// flushing before unlock. Silently does nothing if the file cannot be opened.
function writeToFile($file, $text): void {
    $fp = fopen($file, "w");
    // fopen() returns false on bad path/permissions; guard before flock()
    if ($fp !== false) {
        if (flock($fp, LOCK_EX)) {
            fwrite($fp, $text);
            fflush($fp); // ensure data is persisted before the lock is released
            flock($fp, LOCK_UN);
        }
        // close even when the lock could not be taken (original leaked $fp)
        fclose($fp);
    }
}
// Source: https://stackoverflow.com/a/2524761
// True only for a *string* whose integer round-trip is lossless (so "007",
// "12.5", floats, and actual ints are rejected) and whose value fits in the
// platform's signed integer range.
function isValidTimeStamp($timestamp)
{
    $roundTrip = (string) (int) $timestamp;
    $withinIntRange = ($timestamp <= PHP_INT_MAX) && ($timestamp >= ~PHP_INT_MAX);
    return ($roundTrip === $timestamp) && $withinIntRange;
}
// Prune the per-IP fail log at $failFile: drop entries older than $cooldown
// seconds, lines that are not well-formed timestamps, and timestamps from the
// future; persist the pruned list if anything changed, then return how many
// recent failures remain.
function cleanupFails(string $failFile, int $time): int {
global $cooldown; // lockout window in seconds (15 * 60, set below in this file)
// Read existing fails
// (@-suppressed: mkdir fails harmlessly when the directory already exists)
@mkdir(dirname($failFile), 0755);
$failText = readFromFile($failFile);
$fails = explode("\n", trim($failText));
// Remove entries older than $cooldown seconds, and entries that are not timestamps
$updateFails = false;
foreach ((array) $fails as $key => $value) {
// $value > $time also discards future timestamps (e.g. after a clock change)
if ( !isValidTimeStamp($value) || ($time - $value > $cooldown) || ($value > $time) ) {
unset ($fails[$key]);
$updateFails = true;
}
}
// Save fails to disk
if ($updateFails) {
$failText = implode("\n", $fails)."\n";
writeToFile($failFile, $failText);
}
return count($fails);
}
// Check $password against the local shadow hash for $username.
// Only "root" may log in via the webGUI; any other account is rejected
// before touching the system.
function verifyUsernamePassword(string $username, string $password): bool {
    if ($username != "root") return false; // also prevents injection: only the literal "root" reaches exec()
    $output = exec("/usr/bin/getent shadow $username");
    if ($output === false) return false;
    $credentials = explode(":", $output);
    // Guard against an empty/malformed getent line so password_verify() is
    // never handed an undefined hash field (original risked $credentials[1]
    // being unset, producing a warning).
    if (!isset($credentials[1]) || $credentials[1] === '') return false;
    return password_verify($password, $credentials[1]);
}
// Ask the local unraid-api service (over its unix domain socket) whether
// $token is a valid 2FA token for $username. Returns true only when the API
// answers HTTP 200 or 204; any transport error or other status fails closed.
function verifyTwoFactorToken(string $username, string $token): bool {
try {
// Create curl client
$curlClient = curl_init();
curl_setopt($curlClient, CURLOPT_HEADER, true);
curl_setopt($curlClient, CURLOPT_RETURNTRANSFER, true);
// Route the request through the API's unix socket; the URL host part is a placeholder
curl_setopt($curlClient, CURLOPT_UNIX_SOCKET_PATH, '/var/run/unraid-api.sock');
curl_setopt($curlClient, CURLOPT_URL, 'http://unixsocket/verify');
curl_setopt($curlClient, CURLOPT_BUFFERSIZE, 256);
curl_setopt($curlClient, CURLOPT_TIMEOUT, 5); // fail closed after 5 seconds
curl_setopt($curlClient, CURLOPT_HTTPHEADER, array('Content-Type:application/json', 'Origin: /var/run/unraid-notifications.sock'));
curl_setopt($curlClient, CURLOPT_POSTFIELDS, json_encode([
'username' => $username,
'token' => $token
]));
// Send the request
curl_exec($curlClient);
// Get the http status code
$httpCode = curl_getinfo($curlClient, CURLINFO_HTTP_CODE);
// Close the connection
curl_close($curlClient);
// Error
// This should accept 200 or 204 status codes
// (a transport failure leaves $httpCode at 0, which is rejected here too)
if ($httpCode !== 200 && $httpCode !== 204) {
// Log error to syslog
my_logger("2FA code for {$username} is invalid, blocking access!");
return false;
}
// Log success to syslog
my_logger("2FA code for {$username} is valid, allowing login!");
// Success
return true;
} catch (Exception $exception) {
// Error
// NOTE(review): the curl_* functions signal failure through return values,
// not exceptions, so this catch looks unreachable — confirm before removing.
return false;
}
}
// Check if a haystack ends in a needle.
// True when the final characters of $haystack equal $needle; an empty
// needle always matches.
function endsWith($haystack, $needle): bool {
    $length = strlen($needle);
    return $length === 0 || substr($haystack, -$length) === $needle;
}
// Check if we're accessing this via a wildcard cert,
// i.e. the request's Host header is a *.myunraid.net name.
function isWildcardCert(): bool {
global $server_name; // derived from $_SERVER['HTTP_HOST'] at the top of this file
return endsWith($server_name, '.myunraid.net');
}
// Check if we're accessing this locally via the expected myunraid.net url:
// wildcard cert in use AND the Host matches the LAN FQDN nginx advertises.
function isLocalAccess(): bool {
global $nginx, $server_name; // $nginx parsed from /var/local/emhttp/nginx.ini below
return isWildcardCert() && $nginx['NGINX_LANFQDN'] === $server_name;
}
// Check if we're accessing this remotely via the expected myunraid.net url:
// wildcard cert in use AND the Host matches the WAN FQDN nginx advertises.
function isRemoteAccess(): bool {
global $nginx, $server_name; // $nginx parsed from /var/local/emhttp/nginx.ini below
return isWildcardCert() && $nginx['NGINX_WANFQDN'] === $server_name;
}
// Check if 2fa is enabled for local (requires USE_SSL to be "auto" so no alternate urls can access the server)
function isLocalTwoFactorEnabled(): bool {
global $nginx, $my_servers; // $my_servers parsed from myservers.cfg below
return $nginx['NGINX_USESSL'] === "auto" && ($my_servers['local']['2Fa']??'') === 'yes';
}
// Check if 2fa is enabled for remote access (per the myservers.cfg 'remote' section).
function isRemoteTwoFactorEnabled(): bool {
global $my_servers; // parsed from myservers.cfg below; missing key means disabled
return ($my_servers['remote']['2Fa']??'') === 'yes';
}
// Load configs into memory
// (@-suppressed: a missing file simply yields false / empty config)
$my_servers = @parse_ini_file('/boot/config/plugins/dynamix.my.servers/myservers.cfg', true);
$nginx = @parse_ini_file('/var/local/emhttp/nginx.ini');
// Vars
$maxFails = 3; // lockout threshold per remote address
$cooldown = 15 * 60; // 15 mins
$remote_addr = $_SERVER['REMOTE_ADDR'] ?? "unknown";
$failFile = "/var/log/pwfail/{$remote_addr}"; // one fail-log file per client IP
// Get the credentials
$username = $_POST['username']??'';
$password = $_POST['password']??'';
$token = $_REQUEST['token']??'';
// Check if we need 2fa
$twoFactorRequired = (isLocalAccess() && isLocalTwoFactorEnabled()) || (isRemoteAccess() && isRemoteTwoFactorEnabled());
// If we have a username + password combo attempt to login
if (!empty($username) && !empty($password)) {
try {
// Bail if we're missing the 2FA token and we expect one
if (isWildcardCert() && $twoFactorRequired && empty($token)) throw new Exception(_('No 2FA token detected'));
// Read existing fails, cleanup expired ones
$time = time();
$failCount = cleanupFails($failFile, $time);
// Check if we're limited
if ($failCount >= $maxFails) {
// log only once, exactly when the threshold is first reached
if ($failCount == $maxFails) my_logger("Ignoring login attempts for {$username} from {$remote_addr}");
throw new Exception(_('Too many invalid login attempts'));
}
// Bail if username + password combo doesn't work
if (!verifyUsernamePassword($username, $password)) throw new Exception(_('Invalid username or password'));
// Bail if we need a token but it's invalid
if (isWildcardCert() && $twoFactorRequired && !verifyTwoFactorToken($username, $token)) throw new Exception(_('Invalid 2FA token'));
// Successful login, start session
@unlink($failFile);
if (session_status()==PHP_SESSION_NONE) session_start();
$_SESSION['unraid_login'] = time();
$_SESSION['unraid_user'] = $username;
// fresh session id on privilege change guards against session fixation
session_regenerate_id(true);
session_write_close();
my_logger("Successful login user {$username} from {$remote_addr}");
// Redirect the user to the start page
header("Location: /".$start_page);
exit;
} catch (Exception $exception) {
// Set error message
$error = $exception->getMessage();
// Log error to syslog
my_logger("Unsuccessful login user {$username} from {$remote_addr}");
// NOTE(review): if the missing-2FA-token check above threw, $time was never
// assigned, so this appends an empty entry — confirm that path should count
appendToFile($failFile, $time."\n");
}
}
$boot = "/boot/config/plugins/dynamix";
$myFile = "case-model.cfg";
$myCase = file_exists("$boot/$myFile") ? file_get_contents("$boot/$myFile") : false;
// parse_plugin_cfg() is a webGUI helper; extract() imports e.g. $display into scope
extract(parse_plugin_cfg('dynamix', true));
$theme_dark = in_array($display['theme'], ['black', 'gray']);
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<meta http-equiv="Cache-Control" content="no-cache">
<meta http-equiv="Pragma" content="no-cache">
<meta http-equiv="Expires" content="0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1">
<meta name="robots" content="noindex, nofollow">
<meta http-equiv="Content-Security-Policy" content="block-all-mixed-content">
<meta name="referrer" content="same-origin">
<title><?=$var['NAME']?>/Login</title>
<style>
/************************
/
/ Fonts
/
/************************/
@font-face{font-family:clear-sans;font-weight:normal;font-style:normal; src:url('/webGui/styles/clear-sans.woff?v=20220513') format('woff')}
@font-face{font-family:clear-sans;font-weight:bold;font-style:normal; src:url('/webGui/styles/clear-sans-bold.woff?v=20220513') format('woff')}
@font-face{font-family:clear-sans;font-weight:normal;font-style:italic; src:url('/webGui/styles/clear-sans-italic.woff?v=20220513') format('woff')}
@font-face{font-family:clear-sans;font-weight:bold;font-style:italic; src:url('/webGui/styles/clear-sans-bold-italic.woff?v=20220513') format('woff')}
@font-face{font-family:bitstream;font-weight:normal;font-style:normal; src:url('/webGui/styles/bitstream.woff?v=20220513') format('woff')}
@font-face{font-family:bitstream;font-weight:bold;font-style:normal; src:url('/webGui/styles/bitstream-bold.woff?v=20220513') format('woff')}
@font-face{font-family:bitstream;font-weight:normal;font-style:italic; src:url('/webGui/styles/bitstream-italic.woff?v=20220513') format('woff')}
@font-face{font-family:bitstream;font-weight:bold;font-style:italic; src:url('/webGui/styles/bitstream-bold-italic.woff?v=20220513') format('woff')}
/************************
/
/ General styling
/
/************************/
body {
background: <?=$theme_dark?'#1C1B1B':'#F2F2F2'?>;
color: <?=$theme_dark?'#fff':'#1c1b1b'?>;
font-family: clear-sans, sans-serif;
font-size: .875rem;
padding: 0;
margin: 0;
}
a {
text-transform: uppercase;
font-weight: bold;
letter-spacing: 2px;
color: #FF8C2F;
text-decoration: none;
}
a:hover {
color: #f15a2c;
}
h1 {
font-size: 1.8em;
margin: 0;
}
h2 {
font-size: 0.8em;
margin-top: 0;
margin-bottom: 1.8em;
}
.button {
color: #ff8c2f;
font-family: clear-sans, sans-serif;
background: -webkit-gradient(linear,left top,right top,from(#e03237),to(#fd8c3c)) 0 0 no-repeat,-webkit-gradient(linear,left top,right top,from(#e03237),to(#fd8c3c)) 0 100% no-repeat,-webkit-gradient(linear,left bottom,left top,from(#e03237),to(#e03237)) 0 100% no-repeat,-webkit-gradient(linear,left bottom,left top,from(#fd8c3c),to(#fd8c3c)) 100% 100% no-repeat;
background: linear-gradient(90deg,#e03237 0,#fd8c3c) 0 0 no-repeat,linear-gradient(90deg,#e03237 0,#fd8c3c) 0 100% no-repeat,linear-gradient(0deg,#e03237 0,#e03237) 0 100% no-repeat,linear-gradient(0deg,#fd8c3c 0,#fd8c3c) 100% 100% no-repeat;
background-size: 100% 2px,100% 2px,2px 100%,2px 100%;
}
.button:hover {
color: #fff;
background-color: #f15a2c;
background: -webkit-gradient(linear,left top,right top,from(#e22828),to(#ff8c2f));
background: linear-gradient(90deg,#e22828 0,#ff8c2f);
-webkit-box-shadow: none;
box-shadow: none;
cursor: pointer;
}
.button--small {
font-size: .875rem;
font-weight: 600;
line-height: 1;
text-transform: uppercase;
letter-spacing: 2px;
text-align: center;
text-decoration: none;
display: inline-block;
background-color: transparent;
border-radius: .125rem;
border: 0;
-webkit-transition: none;
transition: none;
padding: .75rem 1.5rem;
}
[type=email], [type=number], [type=password], [type=search], [type=tel], [type=text], [type=url], textarea {
font-family: clear-sans, sans-serif;
font-size: .875rem;
background-color: #F2F2F2;
width: 100%;
margin-bottom: 1rem;
border: 2px solid #ccc;
padding: .75rem 1rem;
-webkit-box-sizing: border-box;
box-sizing: border-box;
border-radius: 0;
-webkit-appearance: none;
}
[type=email]:active, [type=email]:focus, [type=number]:active, [type=number]:focus, [type=password]:active, [type=password]:focus, [type=search]:active, [type=search]:focus, [type=tel]:active, [type=tel]:focus, [type=text]:active, [type=text]:focus, [type=url]:active, [type=url]:focus, textarea:active, textarea:focus {
border-color: #ff8c2f;
outline: none;
}
/************************
/
/ Login specific styling
/
/************************/
#login {
width: 500px;
margin: 6rem auto;
border-radius: 10px;
background: <?=$theme_dark?'#2B2A29':'#fff'?>;
}
#login::after {
content: "";
clear: both;
display: table;
}
#login .logo {
position: relative;
overflow: hidden;
height: 120px;
border-radius: 10px 10px 0 0;
}
#login .wordmark {
z-index: 1;
position: relative;
padding: 2rem;
}
#login .wordmark svg {
width: 100px;
}
#login .case {
float: right;
width: 30%;
font-size: 6rem;
text-align: center;
}
#login .case img {
max-width: 96px;
max-height: 96px;
}
#login .error {
color: red;
margin-top: -20px;
}
#login .content {
padding: 2rem;
}
#login .form {
width: 65%;
}
.angle:after {
content: "";
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 120px;
background-color: #f15a2c;
background: -webkit-gradient(linear,left top,right top,from(#e22828),to(#ff8c2f));
background: linear-gradient(90deg,#e22828 0,#ff8c2f);
-webkit-transform-origin: bottom left;
transform-origin: bottom left;
-webkit-transform: skewY(-6deg);
transform: skewY(-6deg);
-webkit-transition: -webkit-transform .15s linear;
transition: -webkit-transform .15s linear;
transition: transform .15s linear;
transition: transform .15s linear,-webkit-transform .15s linear;
}
.shadow {
-webkit-box-shadow: 0 2px 8px 0 rgba(0,0,0,.12);
box-shadow: 0 2px 8px 0 rgba(0,0,0,.12);
}
.hidden { display: none; }
/************************
/
/ Cases
/
/************************/
[class^="case-"], [class*=" case-"] {
/* use !important to prevent issues with browser extensions that change fonts */
font-family: 'cases' !important;
speak: none;
font-style: normal;
font-weight: normal;
font-variant: normal;
text-transform: none;
line-height: 1;
/* Better Font Rendering =========== */
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
/************************
/
/ Media queries for mobile responsive
/
/************************/
@media (max-width: 500px) {
body {
background: <?=$theme_dark?'#2B2A29':'#fff'?>;
}
[type=email], [type=number], [type=password], [type=search], [type=tel], [type=text], [type=url], textarea {
font-size: 16px; /* This prevents the mobile browser from zooming in on the input-field. */
}
#login {
margin: 0;
border-radius: 0;
width: 100%;
}
#login .logo {
border-radius: 0;
}
.shadow {
box-shadow: none;
}
}
</style>
<link type="text/css" rel="stylesheet" href="<?autov("/webGui/styles/default-cases.css")?>">
<link type="image/png" rel="shortcut icon" href="/webGui/images/<?=$var['mdColor']?>.png">
</head>
<body>
<section id="login" class="shadow">
<div class="logo angle">
<div class="wordmark"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 222.4 39" class="Nav__logo--white"><path fill="#ffffff" d="M146.70000000000002 29.5H135l-3 9h-6.5L138.9 0h8l13.4 38.5h-7.1L142.6 6.9l-5.8 16.9h8.2l1.7 5.7zM29.7 0v25.4c0 8.9-5.8 13.6-14.9 13.6C5.8 39 0 34.3 0 25.4V0h6.5v25.4c0 5.2 3.2 7.9 8.2 7.9 5.2 0 8.4-2.7 8.4-7.9V0h6.6zM50.9 12v26.5h-6.5V0h6.1l17 26.5V0H74v38.5h-6.1L50.9 12zM171.3 0h6.5v38.5h-6.5V0zM222.4 24.7c0 9-5.9 13.8-15.2 13.8h-14.5V0h14.6c9.2 0 15.1 4.8 15.1 13.8v10.9zm-6.6-10.9c0-5.3-3.3-8.1-8.5-8.1h-8.1v27.1h8c5.3 0 8.6-2.8 8.6-8.1V13.8zM108.3 23.9c4.3-1.6 6.9-5.3 6.9-11.5 0-8.7-5.1-12.4-12.8-12.4H88.8v38.5h6.5V5.7h6.9c3.8 0 6.2 1.8 6.2 6.7s-2.4 6.8-6.2 6.8h-3.4l9.2 19.4h7.5l-7.2-14.7z"></path></svg></div>
</div>
<div class="content">
<h1>
<?=htmlspecialchars($var['NAME'])?>
</h1>
<h2>
<?=htmlspecialchars($var['COMMENT'])?>
</h2>
<div class="case">
<?if ($myCase):?>
<?if (substr($myCase,-4)!='.png'):?>
<span class='case-<?=$myCase?>'></span>
<?else:?>
<img src='<?=autov("/webGui/images/$myCase")?>'>
<?endif;?>
<?endif;?>
</div>
<div class="form">
<form class="js-removeTimeout" action="/login" method="POST">
<? if (($twoFactorRequired && !empty($token)) || !$twoFactorRequired) { ?>
<p>
<input name="username" type="text" placeholder="<?=_('Username')?>" autocapitalize="none" autocomplete="off" spellcheck="false" autofocus required>
<input name="password" type="password" placeholder="<?=_('Password')?>" required>
<? if ($twoFactorRequired && !empty($token)) { ?>
<input name="token" type="hidden" value="<?= $token ?>">
<? } ?>
</p>
<? if ($error) echo "<p class='error'>$error</p>"; ?>
<p>
<button type="submit" class="button button--small"><?=_('Login')?></button>
</p>
<? } else { ?>
<? if ($error) { ?>
<div>
<p class="error" style="padding-top:10px;"><?= $error ?></p>
</div>
<? } else { ?>
<div>
<p class="error" style="padding-top:10px;" title="<?= _('Please access this server via the My Servers Dashboard') ?>"><?= _('No 2FA token detected') ?></p>
</div>
<? } ?>
<div>
<a href="https://forums.unraid.net/my-servers/" class="button button--small" title="<?=_('Go to My Servers Dashboard')?>"><?=_('Go to My Servers Dashboard')?></a>
</div>
<? } ?>
<script type="text/javascript">
document.cookie = "cookietest=1";
cookieEnabled = document.cookie.indexOf("cookietest=")!=-1;
document.cookie = "cookietest=1; expires=Thu, 01-Jan-1970 00:00:01 GMT";
if (!cookieEnabled) {
const errorElement = document.createElement('p');
errorElement.classList.add('error');
errorElement.textContent = "<?=_('Please enable cookies to use the Unraid webGUI')?>";
document.body.textContent = '';
document.body.appendChild(errorElement);
}
</script>
</form>
<? if (($twoFactorRequired && !empty($token)) || !$twoFactorRequired) { ?>
<div class="js-addTimeout hidden">
<p class="error" style="padding-top:10px;"><?=_('Transparent 2FA Token timed out')?></p>
<a href="https://forums.unraid.net/my-servers/" class="button button--small" title="<?=_('Go to My Servers Dashboard')?>"><?=_('Go to My Servers Dashboard')?></a>
</div>
<? } ?>
</div>
<? if (($twoFactorRequired && !empty($token)) || !$twoFactorRequired) { ?>
<p class="js-removeTimeout"><a href="https://docs.unraid.net/go/lost-root-password/" target="_blank"><?=_('Password recovery')?></a></p>
<? } ?>
</div>
</section>
<? if ($twoFactorRequired && !empty($token)) { ?>
<script type="text/javascript">
const $elsToRemove = document.querySelectorAll('.js-removeTimeout');
const $elsToShow = document.querySelectorAll('.js-addTimeout');
/**
* A user can manually refresh the page or submit with the wrong username/password
* the t2fa token will be re-used on these page refreshes. We need to keep track of the timeout across potential page
* loads rather than setting the timer with a fresh timeout each page load
*/
const tokenName = '<?=$token?>'.slice(-20);
const ts = Date.now();
const timeoutStarted = sessionStorage.getItem(tokenName) ? Number(sessionStorage.getItem(tokenName)) : ts;
const timeoutDiff = ts - timeoutStarted; // current timestamp minus timestamp when token first set
const timeoutMS = 297000 - timeoutDiff; // 5 minutes minus 3seconds or (5*60)*1000ms - 3000ms = 297000
sessionStorage.setItem(tokenName, timeoutStarted);
const tokenTimeout = setTimeout(() => {
$elsToRemove.forEach(z => z.remove()); // remove elements
$elsToShow.forEach(z => z.classList.remove('hidden')); // add elements
}, timeoutMS); // if timeoutMS is negative value the timeout will trigger immediately
</script>
<? } ?>
</body>
</html>
@@ -0,0 +1,624 @@
<?php
// Authenticate $username (root only) either against the local shadow hash
// or, when the "password" is actually a long JWT, by validating it as an
// SSO token via the unraid-api CLI.
function verifyUsernamePasswordAndSSO(string $username, string $password): bool {
    if ($username != "root") return false; // only the literal "root" ever reaches exec()
    $output = exec("/usr/bin/getent shadow $username");
    if ($output === false) return false;
    $credentials = explode(":", $output);
    // Guard the hash field so a malformed getent line never reaches password_verify()
    if (isset($credentials[1]) && password_verify($password, $credentials[1])) {
        return true;
    }
    // We may have an SSO token, attempt validation
    if (strlen($password) > 800) {
        // A JWT is three base64url segments separated by literal dots.
        // The dots must be escaped — the original pattern's unescaped "."
        // matched any character, so the format check barely filtered anything.
        if (!preg_match('/^[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+$/', $password)) {
            my_logger("SSO Login Attempt Failed: Invalid token format");
            return false;
        }
        $safePassword = escapeshellarg($password); // shell-quote before exec
        $output = array();
        exec("/etc/rc.d/rc.unraid-api sso validate-token $safePassword 2>&1", $output, $code);
        my_logger("SSO Login Attempt Code: $code");
        my_logger("SSO Login Attempt Response: " . print_r($output, true));
        if ($code !== 0 || empty($output)) {
            return false;
        }
        // json_decode() does not throw without JSON_THROW_ON_ERROR (the
        // original's try/catch was dead code); a decode failure yields null,
        // which the is_array() check rejects.
        $response = json_decode($output[0], true);
        if (is_array($response) && isset($response['valid']) && $response['valid'] === true) {
            return true;
        }
    }
    return false;
}
// Included in login.php
// Only start a session to check if they have a cookie that looks like our session
$server_name = strtok($_SERVER['HTTP_HOST'],":");
if (!empty($_COOKIE['unraid_'.md5($server_name)])) {
// Start the session so we can check if $_SESSION has data
if (session_status()==PHP_SESSION_NONE) session_start();
// Check if the user is already logged in
if ($_SESSION && !empty($_SESSION['unraid_user'])) {
// Redirect the user to the start page
header("Location: /".$start_page);
exit;
}
}
function readFromFile($file): string {
$text = "";
if (file_exists($file) && filesize($file) > 0) {
$fp = fopen($file,"r");
if (flock($fp, LOCK_EX)) {
$text = fread($fp, filesize($file));
flock($fp, LOCK_UN);
fclose($fp);
}
}
return $text;
}
function appendToFile($file, $text): void {
$fp = fopen($file,"a");
if (flock($fp, LOCK_EX)) {
fwrite($fp, $text);
fflush($fp);
flock($fp, LOCK_UN);
fclose($fp);
}
}
function writeToFile($file, $text): void {
$fp = fopen($file,"w");
if (flock($fp, LOCK_EX)) {
fwrite($fp, $text);
fflush($fp);
flock($fp, LOCK_UN);
fclose($fp);
}
}
// Source: https://stackoverflow.com/a/2524761
function isValidTimeStamp($timestamp)
{
return ((string) (int) $timestamp === $timestamp)
&& ($timestamp <= PHP_INT_MAX)
&& ($timestamp >= ~PHP_INT_MAX);
}
function cleanupFails(string $failFile, int $time): int {
global $cooldown;
// Read existing fails
@mkdir(dirname($failFile), 0755);
$failText = readFromFile($failFile);
$fails = explode("\n", trim($failText));
// Remove entries older than $cooldown minutes, and entries that are not timestamps
$updateFails = false;
foreach ((array) $fails as $key => $value) {
if ( !isValidTimeStamp($value) || ($time - $value > $cooldown) || ($value > $time) ) {
unset ($fails[$key]);
$updateFails = true;
}
}
// Save fails to disk
if ($updateFails) {
$failText = implode("\n", $fails)."\n";
writeToFile($failFile, $failText);
}
return count($fails);
}
// Check $password against the local shadow hash for $username (root only).
// NOTE(review): this file's login flow calls verifyUsernamePasswordAndSSO()
// instead; this helper appears to be retained but unused here — confirm.
function verifyUsernamePassword(string $username, string $password): bool {
if ($username != "root") return false;
$output = exec("/usr/bin/getent shadow $username");
if ($output === false) return false;
$credentials = explode(":", $output);
return password_verify($password, $credentials[1]);
}
function verifyTwoFactorToken(string $username, string $token): bool {
try {
// Create curl client
$curlClient = curl_init();
curl_setopt($curlClient, CURLOPT_HEADER, true);
curl_setopt($curlClient, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curlClient, CURLOPT_UNIX_SOCKET_PATH, '/var/run/unraid-api.sock');
curl_setopt($curlClient, CURLOPT_URL, 'http://unixsocket/verify');
curl_setopt($curlClient, CURLOPT_BUFFERSIZE, 256);
curl_setopt($curlClient, CURLOPT_TIMEOUT, 5);
curl_setopt($curlClient, CURLOPT_HTTPHEADER, array('Content-Type:application/json', 'Origin: /var/run/unraid-notifications.sock'));
curl_setopt($curlClient, CURLOPT_POSTFIELDS, json_encode([
'username' => $username,
'token' => $token
]));
// Send the request
curl_exec($curlClient);
// Get the http status code
$httpCode = curl_getinfo($curlClient, CURLINFO_HTTP_CODE);
// Close the connection
curl_close($curlClient);
// Error
// This should accept 200 or 204 status codes
if ($httpCode !== 200 && $httpCode !== 204) {
// Log error to syslog
my_logger("2FA code for {$username} is invalid, blocking access!");
return false;
}
// Log success to syslog
my_logger("2FA code for {$username} is valid, allowing login!");
// Success
return true;
} catch (Exception $exception) {
// Error
return false;
}
}
// Check if a haystack ends in a needle
function endsWith($haystack, $needle): bool {
return substr_compare($haystack, $needle, -strlen($needle)) === 0;
}
// Check if we're accessing this via a wildcard cert
function isWildcardCert(): bool {
global $server_name;
return endsWith($server_name, '.myunraid.net');
}
// Check if we're accessing this locally via the expected myunraid.net url
function isLocalAccess(): bool {
global $nginx, $server_name;
return isWildcardCert() && $nginx['NGINX_LANFQDN'] === $server_name;
}
// Check if we're accessing this remotely via the expected myunraid.net url
function isRemoteAccess(): bool {
global $nginx, $server_name;
return isWildcardCert() && $nginx['NGINX_WANFQDN'] === $server_name;
}
// Check if 2fa is enabled for local (requires USE_SSL to be "auto" so no alternate urls can access the server)
function isLocalTwoFactorEnabled(): bool {
global $nginx, $my_servers;
return $nginx['NGINX_USESSL'] === "auto" && ($my_servers['local']['2Fa']??'') === 'yes';
}
// Check if 2fa is enabled for remote
function isRemoteTwoFactorEnabled(): bool {
global $my_servers;
return ($my_servers['remote']['2Fa']??'') === 'yes';
}
// Load configs into memory
$my_servers = @parse_ini_file('/boot/config/plugins/dynamix.my.servers/myservers.cfg', true);
$nginx = @parse_ini_file('/var/local/emhttp/nginx.ini');
// Vars
$maxFails = 3;
$cooldown = 15 * 60; // 15 mins
$remote_addr = $_SERVER['REMOTE_ADDR'] ?? "unknown";
$failFile = "/var/log/pwfail/{$remote_addr}";
// Get the credentials
$username = $_POST['username']??'';
$password = $_POST['password']??'';
$token = $_REQUEST['token']??'';
// Check if we need 2fa: local access with local 2FA enabled, or remote access with remote 2FA enabled
$twoFactorRequired = (isLocalAccess() && isLocalTwoFactorEnabled()) || (isRemoteAccess() && isRemoteTwoFactorEnabled());
// Capture the request time up-front so the catch block can always record a failure timestamp.
// Previously this was set inside the try AFTER the 2FA-token check, so a "No 2FA token detected"
// throw reached appendToFile() with $time undefined (PHP notice + empty timestamp in the fail file).
$time = time();
// Initialize $error so the template below can test it without a notice when no login was attempted
$error = null;
// If we have a username + password combo attempt to login
if (!empty($username) && !empty($password)) {
try {
// Bail if we're missing the 2FA token and we expect one
if (isWildcardCert() && $twoFactorRequired && empty($token)) throw new Exception(_('No 2FA token detected'));
// Read existing fails, cleanup expired ones
$failCount = cleanupFails($failFile, $time);
// Check if we're limited
if ($failCount >= $maxFails) {
// Only log the lockout once, on the attempt that first reaches the limit
if ($failCount == $maxFails) my_logger("Ignoring login attempts for {$username} from {$remote_addr}");
throw new Exception(_('Too many invalid login attempts'));
}
// Bail if username + password combo doesn't work
if (!verifyUsernamePasswordAndSSO($username, $password)) throw new Exception(_('Invalid username or password'));
// Bail if we need a token but it's invalid
if (isWildcardCert() && $twoFactorRequired && !verifyTwoFactorToken($username, $token)) throw new Exception(_('Invalid 2FA token'));
// Successful login: clear the fail counter and start a fresh session
@unlink($failFile);
if (session_status()==PHP_SESSION_NONE) session_start();
$_SESSION['unraid_login'] = time();
$_SESSION['unraid_user'] = $username;
// Regenerate the session id to prevent session fixation
session_regenerate_id(true);
session_write_close();
my_logger("Successful login user {$username} from {$remote_addr}");
// Redirect the user to the start page
header("Location: /".$start_page);
exit;
} catch (Exception $exception) {
// Set error message (rendered by the template below)
$error = $exception->getMessage();
// Log error to syslog
my_logger("Unsuccessful login user {$username} from {$remote_addr}");
// Record the failed attempt timestamp for rate limiting
appendToFile($failFile, $time."\n");
}
}
// Location of the dynamix plugin settings on the flash drive
$boot = "/boot/config/plugins/dynamix";
$myFile = "case-model.cfg";
// Custom case model identifier (icon class or .png filename), or false when none is configured
$caseModelFile = $boot . "/" . $myFile;
$myCase = file_exists($caseModelFile) ? file_get_contents($caseModelFile) : false;
// Pull the dynamix display settings (e.g. $display) into the local scope
extract(parse_plugin_cfg('dynamix', true));
// Black/gray themes switch the page to the dark palette used in the markup below
$theme_dark = in_array($display['theme'], ['black', 'gray']);
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<meta http-equiv="Cache-Control" content="no-cache">
<meta http-equiv="Pragma" content="no-cache">
<meta http-equiv="Expires" content="0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1">
<meta name="robots" content="noindex, nofollow">
<meta http-equiv="Content-Security-Policy" content="block-all-mixed-content">
<meta name="referrer" content="same-origin">
<title><?=$var['NAME']?>/Login</title>
<style>
/************************
/
/ Fonts
/
/************************/
@font-face{font-family:clear-sans;font-weight:normal;font-style:normal; src:url('/webGui/styles/clear-sans.woff?v=20220513') format('woff')}
@font-face{font-family:clear-sans;font-weight:bold;font-style:normal; src:url('/webGui/styles/clear-sans-bold.woff?v=20220513') format('woff')}
@font-face{font-family:clear-sans;font-weight:normal;font-style:italic; src:url('/webGui/styles/clear-sans-italic.woff?v=20220513') format('woff')}
@font-face{font-family:clear-sans;font-weight:bold;font-style:italic; src:url('/webGui/styles/clear-sans-bold-italic.woff?v=20220513') format('woff')}
@font-face{font-family:bitstream;font-weight:normal;font-style:normal; src:url('/webGui/styles/bitstream.woff?v=20220513') format('woff')}
@font-face{font-family:bitstream;font-weight:bold;font-style:normal; src:url('/webGui/styles/bitstream-bold.woff?v=20220513') format('woff')}
@font-face{font-family:bitstream;font-weight:normal;font-style:italic; src:url('/webGui/styles/bitstream-italic.woff?v=20220513') format('woff')}
@font-face{font-family:bitstream;font-weight:bold;font-style:italic; src:url('/webGui/styles/bitstream-bold-italic.woff?v=20220513') format('woff')}
/************************
/
/ General styling
/
/************************/
/* Page background/foreground flip with $theme_dark (black/gray themes) */
body {
background: <?=$theme_dark?'#1C1B1B':'#F2F2F2'?>;
color: <?=$theme_dark?'#fff':'#1c1b1b'?>;
font-family: clear-sans, sans-serif;
font-size: .875rem;
padding: 0;
margin: 0;
}
a {
text-transform: uppercase;
font-weight: bold;
letter-spacing: 2px;
color: #FF8C2F;
text-decoration: none;
}
a:hover {
color: #f15a2c;
}
h1 {
font-size: 1.8em;
margin: 0;
}
h2 {
font-size: 0.8em;
margin-top: 0;
margin-bottom: 1.8em;
}
/* Gradient "outline" built from four background strips (with -webkit fallback) */
.button {
color: #ff8c2f;
font-family: clear-sans, sans-serif;
background: -webkit-gradient(linear,left top,right top,from(#e03237),to(#fd8c3c)) 0 0 no-repeat,-webkit-gradient(linear,left top,right top,from(#e03237),to(#fd8c3c)) 0 100% no-repeat,-webkit-gradient(linear,left bottom,left top,from(#e03237),to(#e03237)) 0 100% no-repeat,-webkit-gradient(linear,left bottom,left top,from(#fd8c3c),to(#fd8c3c)) 100% 100% no-repeat;
background: linear-gradient(90deg,#e03237 0,#fd8c3c) 0 0 no-repeat,linear-gradient(90deg,#e03237 0,#fd8c3c) 0 100% no-repeat,linear-gradient(0deg,#e03237 0,#e03237) 0 100% no-repeat,linear-gradient(0deg,#fd8c3c 0,#fd8c3c) 100% 100% no-repeat;
background-size: 100% 2px,100% 2px,2px 100%,2px 100%;
}
.button:hover {
color: #fff;
background-color: #f15a2c;
background: -webkit-gradient(linear,left top,right top,from(#e22828),to(#ff8c2f));
background: linear-gradient(90deg,#e22828 0,#ff8c2f);
-webkit-box-shadow: none;
box-shadow: none;
cursor: pointer;
}
.button--small {
font-size: .875rem;
font-weight: 600;
line-height: 1;
text-transform: uppercase;
letter-spacing: 2px;
text-align: center;
text-decoration: none;
display: inline-block;
background-color: transparent;
border-radius: .125rem;
border: 0;
-webkit-transition: none;
transition: none;
padding: .75rem 1.5rem;
}
/* Flat, bordered styling shared by all text-style inputs */
[type=email], [type=number], [type=password], [type=search], [type=tel], [type=text], [type=url], textarea {
font-family: clear-sans, sans-serif;
font-size: .875rem;
background-color: #F2F2F2;
width: 100%;
margin-bottom: 1rem;
border: 2px solid #ccc;
padding: .75rem 1rem;
-webkit-box-sizing: border-box;
box-sizing: border-box;
border-radius: 0;
-webkit-appearance: none;
}
[type=email]:active, [type=email]:focus, [type=number]:active, [type=number]:focus, [type=password]:active, [type=password]:focus, [type=search]:active, [type=search]:focus, [type=tel]:active, [type=tel]:focus, [type=text]:active, [type=text]:focus, [type=url]:active, [type=url]:focus, textarea:active, textarea:focus {
border-color: #ff8c2f;
outline: none;
}
/************************
/
/ Login specific styling
/
/************************/
#login {
width: 500px;
margin: 6rem auto;
border-radius: 10px;
background: <?=$theme_dark?'#2B2A29':'#fff'?>;
}
/* Clearfix for the floated .case element */
#login::after {
content: "";
clear: both;
display: table;
}
#login .logo {
position: relative;
overflow: hidden;
height: 120px;
border-radius: 10px 10px 0 0;
}
#login .wordmark {
z-index: 1;
position: relative;
padding: 2rem;
}
#login .wordmark svg {
width: 100px;
}
#login .case {
float: right;
width: 30%;
font-size: 6rem;
text-align: center;
}
#login .case img {
max-width: 96px;
max-height: 96px;
}
#login .error {
color: red;
margin-top: -20px;
}
#login .content {
padding: 2rem;
}
#login .form {
width: 65%;
}
/* Skewed orange gradient banner behind the logo wordmark */
.angle:after {
content: "";
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 120px;
background-color: #f15a2c;
background: -webkit-gradient(linear,left top,right top,from(#e22828),to(#ff8c2f));
background: linear-gradient(90deg,#e22828 0,#ff8c2f);
-webkit-transform-origin: bottom left;
transform-origin: bottom left;
-webkit-transform: skewY(-6deg);
transform: skewY(-6deg);
-webkit-transition: -webkit-transform .15s linear;
transition: -webkit-transform .15s linear;
transition: transform .15s linear;
transition: transform .15s linear,-webkit-transform .15s linear;
}
/* Card drop shadow (removed on small screens below) */
.shadow {
-webkit-box-shadow: 0 2px 8px 0 rgba(0,0,0,.12);
box-shadow: 0 2px 8px 0 rgba(0,0,0,.12);
}
/* Utility: toggled by the 2FA timeout script */
.hidden { display: none; }
/************************
/
/ Cases
/
/************************/
[class^="case-"], [class*=" case-"] {
/* use !important to prevent issues with browser extensions that change fonts */
font-family: 'cases' !important;
speak: none;
font-style: normal;
font-weight: normal;
font-variant: normal;
text-transform: none;
line-height: 1;
/* Better Font Rendering =========== */
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
/************************
/
/ Media queries for mobile responsive
/
/************************/
@media (max-width: 500px) {
/* On phones the card fills the viewport, so the body matches the card color */
body {
background: <?=$theme_dark?'#2B2A29':'#fff'?>;
}
[type=email], [type=number], [type=password], [type=search], [type=tel], [type=text], [type=url], textarea {
font-size: 16px; /* This prevents the mobile browser from zooming in on the input-field. */
}
#login {
margin: 0;
border-radius: 0;
width: 100%;
}
#login .logo {
border-radius: 0;
}
.shadow {
box-shadow: none;
}
}
</style>
<!-- Case icon font styles; favicon is tinted to match the array status color ($var['mdColor']) -->
<link type="text/css" rel="stylesheet" href="<?autov("/webGui/styles/default-cases.css")?>">
<link type="image/png" rel="shortcut icon" href="/webGui/images/<?=$var['mdColor']?>.png">
</head>
<body>
<section id="login" class="shadow">
<div class="logo angle">
<div class="wordmark"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 222.4 39" class="Nav__logo--white"><path fill="#ffffff" d="M146.70000000000002 29.5H135l-3 9h-6.5L138.9 0h8l13.4 38.5h-7.1L142.6 6.9l-5.8 16.9h8.2l1.7 5.7zM29.7 0v25.4c0 8.9-5.8 13.6-14.9 13.6C5.8 39 0 34.3 0 25.4V0h6.5v25.4c0 5.2 3.2 7.9 8.2 7.9 5.2 0 8.4-2.7 8.4-7.9V0h6.6zM50.9 12v26.5h-6.5V0h6.1l17 26.5V0H74v38.5h-6.1L50.9 12zM171.3 0h6.5v38.5h-6.5V0zM222.4 24.7c0 9-5.9 13.8-15.2 13.8h-14.5V0h14.6c9.2 0 15.1 4.8 15.1 13.8v10.9zm-6.6-10.9c0-5.3-3.3-8.1-8.5-8.1h-8.1v27.1h8c5.3 0 8.6-2.8 8.6-8.1V13.8zM108.3 23.9c4.3-1.6 6.9-5.3 6.9-11.5 0-8.7-5.1-12.4-12.8-12.4H88.8v38.5h6.5V5.7h6.9c3.8 0 6.2 1.8 6.2 6.7s-2.4 6.8-6.2 6.8h-3.4l9.2 19.4h7.5l-7.2-14.7z"></path></svg></div>
</div>
<div class="content">
<h1>
<?=htmlspecialchars($var['NAME'])?>
</h1>
<h2>
<?=htmlspecialchars($var['COMMENT'])?>
</h2>
<!-- Optional case icon: font glyph for built-in models, <img> for custom .png uploads -->
<div class="case">
<?if ($myCase):?>
<?if (substr($myCase,-4)!='.png'):?>
<span class='case-<?=$myCase?>'></span>
<?else:?>
<img src='<?=autov("/webGui/images/$myCase")?>'>
<?endif;?>
<?endif;?>
</div>
<div class="form">
<form class="js-removeTimeout" action="/login" method="POST">
<? if (($twoFactorRequired && !empty($token)) || !$twoFactorRequired) { ?>
<p>
<input name="username" type="text" placeholder="<?=_('Username')?>" autocapitalize="none" autocomplete="off" spellcheck="false" autofocus required>
<input name="password" type="password" placeholder="<?=_('Password')?>" required>
<? if ($twoFactorRequired && !empty($token)) { ?>
<!-- Escape the request-supplied token before echoing it into the attribute (it is untrusted input) -->
<input name="token" type="hidden" value="<?= htmlspecialchars($token) ?>">
<? } ?>
</p>
<? if ($error) echo "<p class='error'>$error</p>"; ?>
<p>
<button type="submit" class="button button--small"><?=_('Login')?></button>
</p>
<? } else { ?>
<!-- 2FA required but no token in the request: show the error (or a hint) and link out -->
<? if ($error) { ?>
<div>
<p class="error" style="padding-top:10px;"><?= $error ?></p>
</div>
<? } else { ?>
<div>
<p class="error" style="padding-top:10px;" title="<?= _('Please access this server via the My Servers Dashboard') ?>"><?= _('No 2FA token detected') ?></p>
</div>
<? } ?>
<div>
<a href="https://forums.unraid.net/my-servers/" class="button button--small" title="<?=_('Go to My Servers Dashboard')?>"><?=_('Go to My Servers Dashboard')?></a>
</div>
<? } ?>
<script type="text/javascript">
// Detect whether cookies work by writing and immediately expiring a test cookie;
// the webGUI session is cookie-based, so without cookies login cannot succeed.
document.cookie = "cookietest=1";
// Declared with const: the original implicit global would throw under strict mode
const cookieEnabled = document.cookie.indexOf("cookietest=")!=-1;
document.cookie = "cookietest=1; expires=Thu, 01-Jan-1970 00:00:01 GMT";
if (!cookieEnabled) {
// Replace the entire page body with a single error message
const errorElement = document.createElement('p');
errorElement.classList.add('error');
errorElement.textContent = "<?=_('Please enable cookies to use the Unraid webGUI')?>";
document.body.textContent = '';
document.body.appendChild(errorElement);
}
</script>
</form>
<?php include "$docroot/plugins/dynamix.my.servers/include/sso-login.php"; ?>
<? if (($twoFactorRequired && !empty($token)) || !$twoFactorRequired) { ?>
<!-- Hidden by default; revealed by the 2FA timeout script when the transparent token expires -->
<div class="js-addTimeout hidden">
<p class="error" style="padding-top:10px;"><?=_('Transparent 2FA Token timed out')?></p>
<a href="https://forums.unraid.net/my-servers/" class="button button--small" title="<?=_('Go to My Servers Dashboard')?>"><?=_('Go to My Servers Dashboard')?></a>
</div>
<? } ?>
</div>
<? if (($twoFactorRequired && !empty($token)) || !$twoFactorRequired) { ?>
<p class="js-removeTimeout"><a href="https://docs.unraid.net/go/lost-root-password/" target="_blank"><?=_('Password recovery')?></a></p>
<? } ?>
</div>
</section>
<? if ($twoFactorRequired && !empty($token)) { ?>
<script type="text/javascript">
const $elsToRemove = document.querySelectorAll('.js-removeTimeout');
const $elsToShow = document.querySelectorAll('.js-addTimeout');
/**
 * A user can manually refresh the page or submit with the wrong username/password
 * the t2fa token will be re-used on these page refreshes. We need to keep track of the timeout across potential page
 * loads rather than setting the timer with a fresh timeout each page load
 */
// JSON-encode the token before embedding it in the script: the previous raw interpolation
// inside a single-quoted JS string allowed an attacker-supplied token (from the request)
// to break out of the literal (XSS). The HEX flags also prevent a "</script>" breakout.
const tokenName = <?=json_encode((string)$token, JSON_HEX_TAG|JSON_HEX_APOS|JSON_HEX_QUOT|JSON_HEX_AMP)?>.slice(-20);
const ts = Date.now();
const timeoutStarted = sessionStorage.getItem(tokenName) ? Number(sessionStorage.getItem(tokenName)) : ts;
const timeoutDiff = ts - timeoutStarted; // current timestamp minus timestamp when token first set
const timeoutMS = 297000 - timeoutDiff; // 5 minutes minus 3seconds or (5*60)*1000ms - 3000ms = 297000
sessionStorage.setItem(tokenName, timeoutStarted);
// Timer handle is not needed (never cleared), so don't keep an unused binding
setTimeout(() => {
$elsToRemove.forEach(z => z.remove()); // remove elements
$elsToShow.forEach(z => z.classList.remove('hidden')); // add elements
}, timeoutMS); // if timeoutMS is negative value the timeout will trigger immediately
</script>
<? } ?>
</body>
</html>
+14 -3
View File
@@ -137,6 +137,19 @@ export default defineConfig(({ mode }): ViteUserConfig => {
},
},
test: {
isolate: false,
poolOptions: {
threads: {
useAtomics: true,
minThreads: 8,
maxThreads: 16,
},
forks: {
maxForks: 16,
minForks: 8,
},
},
maxConcurrency: 10,
globals: true,
coverage: {
all: true,
@@ -145,14 +158,12 @@ export default defineConfig(({ mode }): ViteUserConfig => {
},
clearMocks: true,
setupFiles: [
'dotenv/config',
'reflect-metadata',
'src/__test__/setup/env-setup.ts',
'src/__test__/setup/keyserver-mock.ts',
],
exclude: ['**/deploy/**', '**/node_modules/**'],
env: {
NODE_ENV: 'test',
},
},
};
});
+7 -1
View File
@@ -1 +1,7 @@
.env
.env
source/*
deploy/*
.git/*
.github/*
.vscode/*
.DS_Store
+4 -6
View File
@@ -1,8 +1,6 @@
# API version in semver format (required)
API_VERSION=3.11.1
# SHA256 hash of the API package (required)
API_SHA256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
# Pull request number for PR builds (optional)
PR=35
# Skip source validation (default: true for local testing)
SKIP_SOURCE_VALIDATION=true
# Skip validation (default: true for local testing)
SKIP_VALIDATION=true
# Local file server URL (optional)
LOCAL_FILESERVER_URL=http://192.168.1.100:8080
+5 -2
View File
@@ -9,5 +9,8 @@ source/dynamix.unraid.net/sftp-config.json
deploy/
!deploy/.gitkeep
usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/
!usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/.gitkeep
source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/*
!source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/.gitkeep
source/dynamix.unraid.net/usr/local/unraid-api/*
!source/dynamix.unraid.net/usr/local/unraid-api/.gitkeep
+123
View File
@@ -0,0 +1,123 @@
# Unraid Plugin Builder
Tool for building and testing Unraid plugins locally as well as packaging them for deployment.
## Development Workflow
### 1. Watch for Changes
The watch command will automatically sync changes from the API, UI components, and web app into the plugin source:
```bash
# Start watching all components
pnpm run watch:all
# Or run individual watchers:
pnpm run api:watch # Watch API changes
pnpm run ui:watch # Watch Unraid UI component changes
pnpm run wc:watch # Watch web component changes
```
This will copy:
- API files to `./source/dynamix.unraid.net/usr/local/unraid-api`
- UI components to `./source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components`
- Web components to the same UI directory
### 2. Build the Plugin
Once your changes are ready, build the plugin package:
```bash
# Build using Docker - on non-Linux systems
pnpm run docker:build-and-run
# Or build with the build script
pnpm run build:validate
```
This will create the plugin files in `./deploy/release/`
### 3. Serve and Install
Start a local HTTP server to serve the plugin files:
```bash
# Serve the plugin files
pnpm run http-server
```
Then install the plugin on your Unraid development machine by visiting:
`http://SERVER_NAME.local/Plugins`
Then paste the following URL into the Unraid Plugins page:
`http://YOUR_LOCAL_DEV_MACHINE_IP:8080/plugins/local/dynamix.unraid.net.plg`
Replace `SERVER_NAME` with your Unraid server's hostname, and `YOUR_LOCAL_DEV_MACHINE_IP` with the LAN IP address of the development machine running the http-server.
## Development Tips
- Run watchers in a separate terminal while developing
- The http-server includes CORS headers for local development
- Check the Unraid system log for plugin installation issues
## Environment Setup
1. Initialize environment:
```bash
pnpm run env:init
```
2. Validate environment:
```bash
pnpm run env:validate
```
## Available Commands
### Build Commands
- `build` - Build the plugin package
- `build:validate` - Build with environment validation
- `docker:build` - Build the Docker container
- `docker:run` - Run the builder in Docker
- `docker:build-and-run` - Build and run in Docker
### Watch Commands
- `watch:all` - Watch all component changes
- `api:watch` - Watch API changes
- `ui:watch` - Watch UI component changes
- `wc:watch` - Watch web component changes
### Server Commands
- `http-server` - Serve the plugin files locally
### Environment Commands
- `env:init` - Create initial .env file
- `env:validate` - Validate environment setup
- `env:clean` - Remove .env file
## Troubleshooting
1. **Watch not updating files**
- Check that source directories exist
- Verify file permissions
2. **Build failures**
- Ensure .env file exists
- Check Docker setup if using containerized build
- Verify source files are present
3. **Installation issues**
- Confirm http-server is running
- Check your local IP is correct
- Verify plugin file permissions
+825
View File
File diff suppressed because it is too large Load Diff
+13 -2
View File
@@ -4,6 +4,7 @@
"date-fns": "^4.1.0",
"glob": "^11.0.1",
"html-sloppy-escaper": "^0.1.0",
"http-server": "^14.1.1",
"semver": "^7.7.1",
"tsx": "^4.19.2",
"zod": "^3.24.1",
@@ -18,12 +19,22 @@
"docker:build": "docker build -t plugin-builder .",
"docker:run": "docker run --env-file .env -v $(pwd)/deploy:/app/deploy -v $(cd ../ && pwd)/.git:/app/.git -v $(pwd)/source:/app/source plugin-builder",
"docker:build-and-run": "npm run docker:build && npm run docker:run",
"http-server": "http-server ./deploy/release/ -p 8080 --cors",
"// Environment management": "",
"env:init": "cp .env.example .env",
"env:validate": "node -e \"require('fs').existsSync('.env') || (console.error('Error: .env file missing. Run npm run env:init first') && process.exit(1))\"",
"env:validate": "test -f .env || (echo 'Error: .env file missing. Run npm run env:init first' && exit 1)",
"env:clean": "rm -f .env",
"// Composite commands": "",
"start": "npm run env:validate && npm run docker:build-and-run",
"test": "npm run env:init && npm run start && npm run env:clean"
"test": "npm run env:init && npm run start && npm run env:clean",
"// Watchers for Other Changes": "",
"wc:clean": "rm -r ./source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/*",
"wc:watch": "cpx -w -v \"../web/.nuxt/nuxt-custom-elements/dist/unraid-components/**/*\" ./source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components",
"api:watch": "cpx -w -C -v \"../api/deploy/pack/**/*\" ./source/dynamix.unraid.net/usr/local/unraid-api",
"ui:watch": "cpx -w -v \"../unraid-ui/dist/**/*\" ./source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components",
"watch:all": "npm run wc:clean && npm run wc:watch & npm run api:watch & npm run ui:watch"
},
"devDependencies": {
"cpx2": "^8.0.0"
}
}
+6 -23
View File
@@ -9,13 +9,11 @@
<!ENTITY source "/boot/config/plugins/dynamix.my.servers/&name;">
<!ENTITY SHA256 "">
<!ENTITY API_version "">
<!ENTITY API_SHA256 "">
<!ENTITY NODEJS_FILENAME "node-v20.18.1-linux-x64.tar.xz">
<!ENTITY NODEJS_SHA256 "c6fa75c841cbffac851678a472f2a5bd612fff8308ef39236190e1f8dbb0e567">
<!ENTITY NODEJS_TXZ "https://nodejs.org/dist/v20.18.1/node-v20.18.1-linux-x64.tar.xz">
<!ENTITY MAIN_TXZ "">
<!ENTITY API_TGZ "">
<!ENTITY PR "">
<!ENTITY TAG "">
]>
<PLUGIN name="&name;" author="&author;" version="&version;" pluginURL="&pluginURL;"
@@ -115,7 +113,6 @@ sha256check() {
}
]]>
sha256check "&source;.txz" "&SHA256;"
sha256check "/boot/config/plugins/dynamix.my.servers/unraid-api.tgz" "&API_SHA256;"
exit 0
</INLINE>
</FILE>
@@ -163,19 +160,12 @@ sha256check() {
<SHA256>&SHA256;</SHA256>
</FILE>
<!-- download unraid-api -->
<FILE Name="/boot/config/plugins/dynamix.my.servers/unraid-api.tgz">
<URL>&API_TGZ;</URL>
<SHA256>&API_SHA256;</SHA256>
</FILE>
<FILE Run="/bin/bash" Method="install">
<INLINE>
MAINTXZ="&source;.txz"
<![CDATA[
# before proceeding with install, doubly confirm downloaded files exist. just being pedantic.
FILE=${MAINTXZ} && [[ ! -f "$FILE" ]] && echo "⚠️ file missing - $FILE" && exit 1
FILE=/boot/config/plugins/dynamix.my.servers/unraid-api.tgz && [[ ! -f "$FILE" ]] && echo "⚠️ file missing - $FILE" && exit 1
exit 0
]]>
</INLINE>
@@ -432,6 +422,7 @@ if [ -e /etc/rc.d/rc.unraid-api ]; then
rm -rf /boot/config/plugins/dynamix.my.servers/wc
rm -f /boot/config/plugins/Unraid.net/unraid-api.tgz
rm -f /boot/config/plugins/Unraid.net/.gitignore
rm -f /boot/config/plugins/dynamix.my.servers/unraid-api.tgz
rm -rf /boot/config/plugins/Unraid.net/webComps
rm -rf /boot/config/plugins/Unraid.net/wc
rm -f /usr/local/emhttp/webGui/javascript/vue.js
@@ -451,7 +442,7 @@ exit 0
<!-- install all the things -->
<FILE Run="/bin/bash" Method="install">
<INLINE>
PR="&PR;" PLGTYPE="&env;" MAINTXZ="&source;.txz"
TAG="&TAG;" PLGTYPE="&env;" MAINTXZ="&source;.txz"
<![CDATA[
appendTextIfMissing() {
FILE="$1" TEXT="$2"
@@ -757,8 +748,8 @@ upgradepkg --install-new --reinstall "${MAINTXZ}"
# WARNING: failure here results in broken install
[[ ! -f /usr/local/emhttp/plugins/dynamix.my.servers/scripts/gitflash_log ]] && echo "⚠️ files missing from main txz" && exit 1
if [[ -n "$PR" && "$PR" != "" ]]; then
printf -v sedcmd 's@^\*\*Unraid Connect\*\*@**Unraid Connect PR #%s**@' "$PR"
if [[ -n "$TAG" && "$TAG" != "" ]]; then
printf -v sedcmd 's@^\*\*Unraid Connect\*\*@**Unraid Connect (%s)**@' "$TAG"
sed -i "${sedcmd}" "/usr/local/emhttp/plugins/dynamix.unraid.net/README.md"
elif [[ "$PLGTYPE" == "staging" ]]; then
sed -i "s@^\*\*Unraid Connect\*\*@**Unraid Connect (staging)**@" "/usr/local/emhttp/plugins/dynamix.unraid.net/README.md"
@@ -877,9 +868,6 @@ echo
# - Must come after the web component timestamp check to avoid potentially targeting the wrong JS during this setup
source /usr/local/emhttp/plugins/dynamix.my.servers/scripts/activation_code_setup
# Install the API (previously in rc.d script)
echo "Extracting and installing the Unraid API"
flash="/boot/config/plugins/dynamix.my.servers"
[[ ! -d "${flash}" ]] && echo "Please reinstall the Unraid Connect plugin" && exit 1
[[ ! -f "${flash}/env" ]] && echo 'env=production' >"${flash}/env"
@@ -901,14 +889,9 @@ fi
# Kill any remaining unraid-api processes
pkill -9 unraid-api
# Ensure installation tgz exists
[[ ! -f "${flash}/unraid-api.tgz" ]] && echo "Missing unraid-api.tgz" && exit 1
# Install unraid-api
rm -rf "${api_base_directory}"
mkdir -p "${api_base_directory}"
# Create log directory (PM2 will not start without it)
mkdir -p /var/log/unraid-api
tar -C "${api_base_directory}" -xzf "${flash}/unraid-api.tgz" --strip 1
# Copy env file
cp "${api_base_directory}/.env.${env}" "${api_base_directory}/.env"
+87 -67
View File
@@ -2,42 +2,29 @@ import { execSync } from "child_process";
import { cp, readFile, writeFile, mkdir, readdir } from "fs/promises";
import { basename, join } from "path";
import { createHash } from "node:crypto";
import { $, cd, dotenv } from "zx";
import { z } from "zod";
import { $, cd } from "zx";
import conventionalChangelog from "conventional-changelog";
import { escape as escapeHtml } from "html-sloppy-escaper";
import { parse } from "semver";
import { existsSync } from "fs";
import { format as formatDate } from "date-fns";
const envSchema = z.object({
API_VERSION: z.string().refine((v) => {
return parse(v) ?? false;
}, "Must be a valid semver version"),
API_SHA256: z.string().regex(/^[a-f0-9]{64}$/),
PR: z
.string()
.optional()
.refine((v) => !v || /^\d+$/.test(v), "Must be a valid PR number"),
SKIP_SOURCE_VALIDATION: z
.string()
.optional()
.default("false")
.refine((v) => v === "true" || v === "false", "Must be true or false"),
});
type Env = z.infer<typeof envSchema>;
const validatedEnv = envSchema.parse(dotenv.config() as Env);
import { setupEnvironment } from "./setup-environment";
import { dirname } from "node:path";
const pluginName = "dynamix.unraid.net" as const;
const startingDir = process.cwd();
const validatedEnv = await setupEnvironment(startingDir);
const BASE_URLS = {
STABLE: "https://stable.dl.unraid.net/unraid-api",
PREVIEW: "https://preview.dl.unraid.net/unraid-api",
...(validatedEnv.LOCAL_FILESERVER_URL
? { LOCAL: validatedEnv.LOCAL_FILESERVER_URL }
: {}),
} as const;
// Setup environment variables
// Ensure that git is available
try {
await $`git log -1 --pretty=%B`;
} catch (err) {
@@ -93,6 +80,23 @@ const validateSourceDir = async () => {
if (webcomponents.length === 1 && webcomponents[0] === ".gitkeep") {
throw new Error(`No webcomponents found in ${webcomponentDir}`);
}
// Check for the existence of "ui.manifest.json" as well as "manifest.json" in webcomponents
if (
!webcomponents.includes("ui.manifest.json") ||
!webcomponents.includes("manifest.json")
) {
throw new Error(
`Webcomponents must contain both "ui.manifest.json" and "manifest.json"`
);
}
const apiDir = join(
startingDir,
"source/dynamix.unraid.net/usr/local/unraid-api/package.json"
);
if (!existsSync(apiDir)) {
throw new Error(`API directory ${apiDir} does not exist`);
}
};
const buildTxz = async (
@@ -101,9 +105,13 @@ const buildTxz = async (
txzName: string;
txzSha256: string;
}> => {
if (validatedEnv.SKIP_SOURCE_VALIDATION !== "true") {
if (
validatedEnv.SKIP_VALIDATION !== "true" ||
validatedEnv.LOCAL_FILESERVER_URL
) {
await validateSourceDir();
}
const txzName = `${pluginName}-${version}.txz`;
const txzPath = join(startingDir, "deploy/release/archive", txzName);
const prePackDir = join(startingDir, "deploy/pre-pack");
@@ -129,7 +137,10 @@ const buildTxz = async (
await cd(prePackDir);
$.verbose = true;
await $`${join(startingDir, "scripts/makepkg")} -l y -c y "${txzPath}"`;
await $`${join(
startingDir,
"scripts/makepkg"
)} -l y -c y --compress -1 "${txzPath}"`;
$.verbose = false;
await cd(startingDir);
@@ -139,11 +150,13 @@ const buildTxz = async (
.digest("hex");
console.log(`TXZ SHA256: ${sha256}`);
try {
await $`${join(startingDir, "scripts/explodepkg")} "${txzPath}"`;
} catch (err) {
console.error(`Error: invalid txz package created: ${txzPath}`);
process.exit(1);
if (validatedEnv.SKIP_VALIDATION !== "true") {
try {
await $`${join(startingDir, "scripts/explodepkg")} "${txzPath}"`;
} catch (err) {
console.error(`Error: invalid txz package created: ${txzPath}`);
process.exit(1);
}
}
return { txzSha256: sha256, txzName };
@@ -151,9 +164,9 @@ const buildTxz = async (
const getStagingChangelogFromGit = async (
apiVersion: string,
pr: string | null = null
tag: string | null = null
): Promise<string | null> => {
console.debug("Getting changelog from git" + (pr ? " for PR" : ""));
console.debug("Getting changelog from git" + (tag ? " for TAG" : ""));
try {
const changelogStream = conventionalChangelog(
{
@@ -162,16 +175,16 @@ const getStagingChangelogFromGit = async (
{
version: apiVersion,
},
pr
tag
? {
from: "origin/main",
to: "HEAD",
}
: {},
undefined,
pr
tag
? {
headerPartial: `## [PR #${pr}](https://github.com/unraid/api/pull/${pr})\n\n`,
headerPartial: `## [${tag}](https://github.com/unraid/api/${tag})\n\n`,
}
: undefined
);
@@ -192,49 +205,49 @@ const buildPlugin = async ({
txzSha256,
txzName,
version,
pr = "",
tag = "",
apiVersion,
apiSha256,
}: {
type: "staging" | "pr" | "production";
type: "staging" | "pr" | "production" | "local";
txzSha256: string;
txzName: string;
version: string;
pr?: string;
tag?: string;
apiVersion: string;
apiSha256: string;
}) => {
const rootPlgFile = join(startingDir, "/plugins/", `${pluginName}.plg`);
// Set up paths
const newPluginFile = join(
startingDir,
"/deploy/release/plugins/",
`${pluginName}${type === "production" ? "" : `.${type}`}.plg`
type,
`${pluginName}.plg`
);
// Define URLs
let PLUGIN_URL = "";
let MAIN_TXZ = "";
let API_TGZ = "";
let RELEASE_NOTES: string | null = null;
switch (type) {
case "production":
PLUGIN_URL = `${BASE_URLS.STABLE}/${pluginName}.plg`;
MAIN_TXZ = `${BASE_URLS.STABLE}/${txzName}`;
API_TGZ = `${BASE_URLS.STABLE}/unraid-api-${apiVersion}.tgz`;
break;
case "pr":
PLUGIN_URL = `${BASE_URLS.PREVIEW}/pr/${pr}/${pluginName}.plg`;
MAIN_TXZ = `${BASE_URLS.PREVIEW}/pr/${pr}/${txzName}`;
API_TGZ = `${BASE_URLS.PREVIEW}/pr/${pr}/unraid-api-${apiVersion}.tgz`;
RELEASE_NOTES = await getStagingChangelogFromGit(apiVersion, pr);
PLUGIN_URL = `${BASE_URLS.PREVIEW}/tag/${tag}/${pluginName}.plg`;
MAIN_TXZ = `${BASE_URLS.PREVIEW}/tag/${tag}/${txzName}`;
RELEASE_NOTES = await getStagingChangelogFromGit(apiVersion, tag);
break;
case "staging":
PLUGIN_URL = `${BASE_URLS.PREVIEW}/${pluginName}.plg`;
MAIN_TXZ = `${BASE_URLS.PREVIEW}/${txzName}`;
API_TGZ = `${BASE_URLS.PREVIEW}/unraid-api-${apiVersion}.tgz`;
RELEASE_NOTES = await getStagingChangelogFromGit(apiVersion);
break;
case "local":
PLUGIN_URL = `${BASE_URLS.LOCAL}/plugins/${type}/${pluginName}.plg`;
MAIN_TXZ = `${BASE_URLS.LOCAL}/archive/${txzName}`;
RELEASE_NOTES = await getStagingChangelogFromGit(apiVersion, tag);
break;
}
// Update plg file
@@ -248,15 +261,13 @@ const buildPlugin = async ({
pluginURL: PLUGIN_URL,
SHA256: txzSha256,
MAIN_TXZ: MAIN_TXZ,
API_TGZ: API_TGZ,
PR: pr,
TAG: tag,
API_version: apiVersion,
API_SHA256: apiSha256,
};
// Iterate over entities and update them
Object.entries(entities).forEach(([key, value]) => {
if (key !== "PR" && !value) {
if (key !== "TAG" && !value) {
throw new Error(`Entity ${key} not set in entities : ${value}`);
}
plgContent = updateEntityValue(plgContent, key, value);
@@ -270,6 +281,7 @@ const buildPlugin = async ({
);
}
await mkdir(dirname(newPluginFile), { recursive: true });
await writeFile(newPluginFile, plgContent);
console.log(`${type} plugin: ${newPluginFile}`);
};
@@ -284,33 +296,41 @@ const main = async () => {
const version = formatDate(new Date(), "yyyy.MM.dd.HHmm");
console.log(`Version: ${version}`);
const { txzSha256, txzName } = await buildTxz(version);
const { API_VERSION, API_SHA256, PR } = validatedEnv;
const { API_VERSION, TAG, LOCAL_FILESERVER_URL } = validatedEnv;
if (LOCAL_FILESERVER_URL) {
await buildPlugin({
type: "local",
txzSha256,
txzName,
version,
tag: TAG,
apiVersion: API_VERSION,
});
} else if (TAG) {
await buildPlugin({
type: "pr",
txzSha256,
txzName,
version,
tag: TAG,
apiVersion: API_VERSION,
});
}
await buildPlugin({
type: "staging",
txzSha256,
txzName,
version,
apiVersion: API_VERSION,
apiSha256: API_SHA256,
});
if (PR) {
await buildPlugin({
type: "pr",
txzSha256,
txzName,
version,
pr: PR,
apiVersion: API_VERSION,
apiSha256: API_SHA256,
});
}
await buildPlugin({
type: "production",
txzSha256,
txzName,
version,
apiVersion: API_VERSION,
apiSha256: API_SHA256,
});
};
+1 -1
View File
@@ -380,7 +380,7 @@ else
fi
if [ "$CHOWN" = "y" ]; then
find . -type d -exec chmod -v 755 {} \;
find . -type d -exec chown -v root.root {} \;
find . -type d -exec chown -v root:root {} \;
fi
# Ensure that the 'root' of the package is chmod 755 because
-103
View File
@@ -1,103 +0,0 @@
#!/bin/bash
# LEGACY SCRIPT - Kept for validation purposes. If still present after May 2025, delete.
# passes `shellcheck` and `shfmt -i 2`
#
# Builds the dynamix.unraid.net Unraid plugin: packages the plugin source
# directory into a Slackware-style .txz archive, verifies the archive, then
# rewrites ENTITY values inside the .plg manifest to point at the new
# artifacts.
#
# Usage: pkg_build.sh [s|p] [PR_NUMBER]
#   s         -> staging build (preview.dl.unraid.net URLs)
#   p         -> production build (stable.dl.unraid.net URLs)
#   PR_NUMBER -> optional; when given, a pull-request build is produced
#                (preview.dl.unraid.net/unraid-api/pr/<PR>/ URLs)
#
# Expects API_VERSION and API_SHA256 to be set in the environment (used by
# the sed replacements below). Uses sudo so files inside the txz are
# root-owned.
#
# Map the first argument to a build environment; bail out with usage otherwise.
[[ "$1" == "s" ]] && env=staging
[[ "$1" == "p" ]] && env=production
[[ -z "${env}" ]] && echo "usage: [s|p]" && exit 1
# If we have a second parameter, it's the PR number (for Pull request builds)
[[ -n "$2" ]] && PR="$2" || PR=""
# DIR = this script's directory (plugin source), MAINDIR = two levels up
# (repository root holding plugins/ and archive/).
DIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
MAINDIR=$(dirname "$(dirname "${DIR}")")
tmpdir=$(mktemp -d)
pluginSrc=$(basename "${DIR}")
# Plugin name is taken from the source directory name.
plugin="${pluginSrc}"
# Timestamp-based version, e.g. 2025.02.11.1145.
version=$(date +"%Y.%m.%d.%H%M")
plgfile="${MAINDIR}/plugins/${plugin}.plg"
txzfile="${MAINDIR}/archive/${plugin}-${version}.txz"
# create txz package
mkdir -p "$(dirname "${txzfile}")"
mkdir -p "${tmpdir}"
# Copy the plugin tree into the temp dir, excluding build tooling and junk
# files; --parents preserves relative paths. Word splitting of $(find …) is
# intentional here, hence the shellcheck suppression.
# shellcheck disable=SC2046
cp --parents -f $(find . -type f ! \( -iname ".DS_Store" -o -iname "pkg_build.sh" -o -iname "makepkg" -o -iname "explodepkg" -o -iname "sftp-config.json" \)) "${tmpdir}/"
cd "${tmpdir}" || exit 1
# Normalize permissions/ownership before packaging so the installed files
# end up root-owned with sane modes.
chmod 0755 -R .
sudo chown root:root -R .
sudo "${MAINDIR}/source/dynamix.unraid.net/makepkg" -l y -c y "${txzfile}"
sudo rm -rf "${tmpdir}"
# Record checksums of the built archive (SHA256 is what the .plg consumes).
md5=$(md5sum "${txzfile}" | cut -f 1 -d ' ')
echo "MD5: ${md5}"
sha256=$(sha256sum "${txzfile}" | cut -f 1 -d ' ')
echo "SHA256: ${sha256}"
# test txz package
# Re-extract the archive in a fresh temp dir; any output on stderr from
# explodepkg is treated as a packaging failure.
mkdir -p "${tmpdir}"
cd "${tmpdir}" || exit 1
RET=$(sudo "${MAINDIR}/source/dynamix.unraid.net/explodepkg" "${txzfile}" 2>&1 >/dev/null)
sudo rm -rf "${tmpdir}"
[[ "${RET}" != "" ]] && echo "Error: invalid txz package created: ${txzfile}" && exit 1
cd "${DIR}" || exit 1
# define vars for plg
# \&name; is a literal XML entity reference left for Unraid's plugin system
# to resolve at install time; it is not expanded by this script.
PLUGIN_URL="https://stable.dl.unraid.net/unraid-api/\&name;.plg"
MAIN_TXZ="https://stable.dl.unraid.net/unraid-api/${plugin}-${version}.txz"
API_TGZ="https://stable.dl.unraid.net/unraid-api/unraid-api-${API_VERSION}.tgz"
# Check if PR is set, use a different path if so
if [[ -n "${PR}" ]]; then
MAIN_TXZ="https://preview.dl.unraid.net/unraid-api/pr/${PR}/${plugin}-${version}.txz"
API_TGZ="https://preview.dl.unraid.net/unraid-api/pr/${PR}/unraid-api-${API_VERSION}.tgz"
PLUGIN_URL="https://preview.dl.unraid.net/unraid-api/pr/${PR}/${plugin}.plg"
elif [[ "${env}" == "staging" ]]; then
PLUGIN_URL="https://preview.dl.unraid.net/unraid-api/\&name;.plg"
MAIN_TXZ="https://preview.dl.unraid.net/unraid-api/${plugin}-${version}.txz"
API_TGZ="https://preview.dl.unraid.net/unraid-api/unraid-api-${API_VERSION}.tgz"
fi
# update plg file
# Each sed rewrites one <!ENTITY …> value in place; '#' is used as the sed
# delimiter because the replacement values contain slashes.
sed -i -E "s#(ENTITY name\s*)\".*\"#\1\"${plugin}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY env\s*)\".*\"#\1\"${env}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY version\s*)\".*\"#\1\"${version}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY pluginURL\s*)\".*\"#\1\"${PLUGIN_URL}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY SHA256\s*)\".*\"#\1\"${sha256}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY MAIN_TXZ\s*)\".*\"#\1\"${MAIN_TXZ}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY API_TGZ\s*)\".*\"#\1\"${API_TGZ}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY PR\s*)\".*\"#\1\"${PR}\"#g" "${plgfile}"
# set from environment vars
sed -i -E "s#(ENTITY API_version\s*)\".*\"#\1\"${API_VERSION}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY API_SHA256\s*)\".*\"#\1\"${API_SHA256}\"#g" "${plgfile}"
# validate that all ENTITY values are present
# NOTE(review): the NODEJS_* entities are validated below but never written
# by any sed above — presumably they must already exist in the .plg; confirm.
required_entities=("name" "env" "version" "pluginURL" "SHA256" "MAIN_TXZ" "API_TGZ" "NODEJS_FILENAME" "NODEJS_SHA256" "NODEJS_TXZ" "API_version" "API_SHA256")
validation_failed=false
for entity in "${required_entities[@]}"; do
entity_value=$(grep -oP "ENTITY ${entity} \"\K[^\"]*" "${plgfile}" || echo "")
if [[ -z "${entity_value}" ]]; then
echo "Error: ENTITY ${entity} was not replaced correctly in ${plgfile}"
validation_failed=true
elif [[ "${entity_value}" =~ ^[[:space:]]*$ ]]; then
echo "Error: ENTITY ${entity} has an empty value in ${plgfile}"
validation_failed=true
fi
done
if [[ "${validation_failed}" == "true" ]]; then
# NOTE(review): this script never creates ${plgfile}.bak itself; the
# restore below only fires if a backup exists from some earlier process —
# confirm that is intentional.
if [[ -f "${plgfile}.bak" ]]; then
echo "Restoring backup due to validation failure"
mv "${plgfile}.bak" "${plgfile}"
fi
exit 1
fi
# add changelog for major versions
# sed -i "/<CHANGES>/a ###${version}\n" ${plgfile}
echo
# Print the rewritten ENTITY lines for operator review.
grep -E "ENTITY (name|PLUGIN_URL|env|version|MD5|SHA256|node_api_version|MAIN_TXZ|API_TGZ)" "${plgfile}"
echo
echo "${env} plugin: ${plgfile}"
echo "${env} txz: ${txzfile}"
+73
View File
@@ -0,0 +1,73 @@
import { readFile } from "fs/promises";
import { join } from "path";
import { z } from "zod";
import { parse } from "semver";
import { dotenv } from "zx";
/** Non-blocking sleep: resolves after roughly `ms` milliseconds. */
const wait = (ms: number) =>
  new Promise<void>((done) => {
    setTimeout(done, ms);
  });
// Schema for the plugin build environment. Parsed with `envSchema.parse`
// in setupEnvironment below, so an invalid environment throws a ZodError.
const envSchema = z.object({
  // Version of the bundled unraid-api; must be valid semver.
  API_VERSION: z.string().refine((v) => {
    // semver.parse returns null for invalid input; `?? false` turns that
    // into a failing refinement (a parsed SemVer object is truthy).
    return parse(v) ?? false;
  }, "Must be a valid semver version"),
  // Optional git tag; when present, a tagged build is produced (see main).
  TAG: z
    .string()
    .optional(),
  // String flag, "true" or "false" (defaults to "false"); kept as a string
  // because it comes straight from the environment.
  SKIP_VALIDATION: z
    .string()
    .optional()
    .default("false")
    .refine((v) => v === "true" || v === "false", "Must be true or false"),
  // When set, build artifacts are served from a local file server
  // ("local" build path in main); must be a valid URL.
  LOCAL_FILESERVER_URL: z.string().url().optional(),
});
// Validated environment shape derived from the schema.
type Env = z.infer<typeof envSchema>;
/**
 * Loads, merges, and validates the environment needed for a plugin build.
 *
 * Sources are merged with increasing precedence:
 *   1. process.env
 *   2. the .env file (via zx's dotenv)
 *   3. API_VERSION read from the bundled unraid-api package.json
 * The merged object is validated against `envSchema`; an invalid
 * environment throws a ZodError.
 *
 * When a non-default mode is detected (SKIP_VALIDATION, TAG, or
 * LOCAL_FILESERVER_URL), a warning is printed and the function pauses for
 * one second so the operator can notice it before the build continues.
 *
 * @param startingDir root of the plugin workspace; the bundled API is
 *   expected under source/dynamix.unraid.net/usr/local/unraid-api
 * @returns the validated environment object
 * @throws ZodError on validation failure; filesystem errors if the bundled
 *   package.json is missing or unreadable
 */
export const setupEnvironment = async (
  startingDir: string
): Promise<Env> => {
  // Reads the version of the API bundled in the plugin source tree so the
  // build always advertises the exact API version it ships.
  const getLocalEnvironmentVariablesFromApiFolder = async (): Promise<Partial<Env>> => {
    const apiDir = join(
      startingDir,
      "source/dynamix.unraid.net/usr/local/unraid-api"
    );
    const apiPackageJson = join(apiDir, "package.json");
    const apiPackageJsonContent = await readFile(apiPackageJson, "utf8");
    // JSON.parse is unvalidated here, but envSchema.parse below rejects a
    // missing or non-semver "version", so a bad package.json still fails fast.
    const apiPackageJsonObject = JSON.parse(apiPackageJsonContent);
    return {
      API_VERSION: apiPackageJsonObject.version,
    };
  };
  const validatedEnv = envSchema.parse(
    {
      ...process.env,
      // NOTE(review): zx's dotenv.config() appears synchronous; the await is
      // harmless either way and kept in case it returns a promise — confirm.
      ...(await dotenv.config()),
      ...(await getLocalEnvironmentVariablesFromApiFolder()),
    }
  );
  // Warn about non-default build modes; pause once at the end if any fired.
  let shouldWait = false;
  // Fix: strict equality (===) instead of loose ==; SKIP_VALIDATION is
  // schema-typed as a string so behavior is unchanged, but the intent is
  // now explicit and lint-clean.
  if (validatedEnv.SKIP_VALIDATION === "true") {
    console.warn("SKIP_VALIDATION is true, skipping validation");
    shouldWait = true;
  }
  if (validatedEnv.TAG) {
    console.warn("TAG is set, will generate a TAGGED build");
    shouldWait = true;
  }
  if (validatedEnv.LOCAL_FILESERVER_URL) {
    console.warn("LOCAL_FILESERVER_URL is set, will generate a local build");
    shouldWait = true;
  }
  console.log("validatedEnv", validatedEnv);
  if (shouldWait) {
    await wait(1000);
  }
  return validatedEnv;
};