feat: native slackware package (#1381)

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit

- **New Features**
- Added detailed versioning for plugin packages incorporating
architecture and build identifiers.
- Simplified and improved install/uninstall scripts with backup and
dynamic package detection.
- Introduced comprehensive setup, verification, patching, and cleanup
scripts for the Unraid API environment.
- Enhanced service control with explicit start, stop, restart, and
status commands.
- Added robust dependency management scripts for restoring and archiving
Node.js modules.
- Implemented vendor archive metadata storage and dynamic handling
during build and runtime.
- Added new CLI options and environment schemas for consistent build
configuration.
- Introduced new shutdown scripts to gracefully stop flash-backup and
unraid-api services.
- Added utility scripts for API version detection and vendor archive
configuration.
- Added a new package description file detailing Unraid API features and
homepage link.

- **Bug Fixes**
- Improved validation and error reporting for missing manifests,
dependencies, and configuration files.
  - Enhanced fallback logic for locating and creating vendor archives.
- Fixed iframe compatibility in UI by updating HTML and Firefox
preference files.

- **Chores**
- Updated .gitignore with generated file patterns for Node.js binaries
and archives.
  - Removed obsolete internal documentation and legacy cleanup scripts.
- Refined Docker Compose and CI workflows to pass precise API versioning
and manage build artifacts.
- Centralized common environment validation and CLI option definitions
across build tools.
- Cleaned up plugin manifest by removing Node.js and PNPM-related
entities and legacy logic.
- Improved logging and error handling in build and installation scripts.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->
This commit is contained in:
Eli Bosley
2025-05-08 22:54:10 -04:00
committed by GitHub
parent a5f48da322
commit 4f63b4cf3b
35 changed files with 1658 additions and 1018 deletions

View File

@@ -104,7 +104,7 @@ jobs:
uses: actions/download-artifact@v4
with:
name: packed-node-modules
path: ${{ github.workspace }}/plugin/
path: ${{ github.workspace }}/plugin/node-modules-archive/
- name: Extract Unraid API
run: |
mkdir -p ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api
@@ -113,9 +113,8 @@ jobs:
id: build-plugin
run: |
cd ${{ github.workspace }}/plugin
ls -al
pnpm run build:txz
pnpm run build:plugin --tag="${{ inputs.TAG }}" --base-url="${{ inputs.BASE_URL }}"
pnpm run build:txz --tag="${{ inputs.TAG }}" --base-url="${{ inputs.BASE_URL }}" --api-version="${{ steps.vars.outputs.API_VERSION }}"
pnpm run build:plugin --tag="${{ inputs.TAG }}" --base-url="${{ inputs.BASE_URL }}" --api-version="${{ steps.vars.outputs.API_VERSION }}"
- name: Ensure Plugin Files Exist
run: |

View File

@@ -94,7 +94,7 @@ jobs:
auth_unix_rw = "none"
EOF
# Add the current user to libvirt and kvm groups (note: this change wont apply to the current session)
# Add the current user to libvirt and kvm groups (note: this change won't apply to the current session)
sudo usermod -aG libvirt,kvm $USER
sudo mkdir -p /var/run/libvirt
@@ -179,11 +179,11 @@ jobs:
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
export API_VERSION
echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
- name: Build
run: |
pnpm run build:release
tar -czf deploy/unraid-api.tgz -C deploy/pack/ .
- name: Upload tgz to Github artifacts
@@ -195,7 +195,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: packed-node-modules
path: ${{ github.workspace }}/api/deploy/packed-node-modules.tar.xz
path: ${{ github.workspace }}/api/deploy/node-modules-archive/packed-node-modules.tar.xz
build-unraid-ui-webcomponents:
name: Build Unraid UI Library (Webcomponent Version)

2
.gitignore vendored
View File

@@ -108,4 +108,4 @@ web/scripts/.sync-webgui-repo-*
plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/data/activation-data.php
# Config file that changes between versions
api/dev/Unraid.net/myservers.cfg
api/dev/Unraid.net/myservers.cfg

2
.nvmrc
View File

@@ -1 +1 @@
22
22.11.0

View File

@@ -5,7 +5,7 @@ import { exit } from 'process';
import type { PackageJson } from 'type-fest';
import { $, cd } from 'zx';
import { getDeploymentVersion } from './get-deployment-version.js';
import { getDeploymentVersion } from '@app/../scripts/get-deployment-version.js';
type ApiPackageJson = PackageJson & {
version: string;
@@ -49,13 +49,11 @@ try {
await writeFile('package.json', JSON.stringify(parsedPackageJson, null, 4));
const sudoCheck = await $`command -v sudo`.nothrow();
const SUDO = sudoCheck.exitCode === 0 ? 'sudo' : '';
await $`${SUDO} chown -R 0:0 node_modules`;
await $`XZ_OPT=-5 tar -cJf packed-node-modules.tar.xz node_modules`;
await $`mv packed-node-modules.tar.xz ../`;
await $`${SUDO} rm -rf node_modules`;
// Create a subdirectory for the node modules archive
await mkdir('../node-modules-archive', { recursive: true });
await $`mv packed-node-modules.tar.xz ../node-modules-archive/`;
await $`rm -rf node_modules`;
// chmod the cli
await $`chmod +x ./dist/cli.js`;

19
plugin/.gitignore vendored
View File

@@ -1,4 +1,3 @@
# Thumbnails
._*
Thumbs.db
@@ -15,6 +14,22 @@ source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-com
source/dynamix.unraid.net/usr/local/unraid-api/*
!source/dynamix.unraid.net/usr/local/unraid-api/.gitkeep
source/dynamix.unraid.net/install/doinst.sh
### Generated Files by Plugin Builder ###
# Ignore node binaries
source/dynamix.unraid.net/usr/local/bin/
packed-pnpm-store.txz
# Node.js download files
node-v*.tar.xz
# Node.js man pages
source/dynamix.unraid.net/usr/local/share/man/man1/node.1
# Node version files
.node-version
source/dynamix.unraid.net/usr/local/.node-version
# Vendor archive files
source/dynamix.unraid.net/usr/local/share/dynamix.unraid.net/config/vendor_archive.json

View File

@@ -2,7 +2,7 @@ import { readFile, writeFile, mkdir, rename } from "fs/promises";
import { $ } from "zx";
import { escape as escapeHtml } from "html-sloppy-escaper";
import { dirname, join } from "node:path";
import { getTxzName, pluginName, startingDir } from "./utils/consts";
import { getTxzName, pluginName, startingDir, defaultArch, defaultBuild } from "./utils/consts";
import { getAssetUrl, getPluginUrl } from "./utils/bucket-urls";
import { getMainTxzUrl } from "./utils/bucket-urls";
import {
@@ -26,9 +26,17 @@ const checkGit = async () => {
}
};
const moveTxzFile = async (txzPath: string, pluginVersion: string) => {
const txzName = getTxzName(pluginVersion);
await rename(txzPath, join(deployDir, txzName));
const moveTxzFile = async ({txzPath, apiVersion}: Pick<PluginEnv, "txzPath" | "apiVersion">) => {
const txzName = getTxzName(apiVersion);
const targetPath = join(deployDir, txzName);
// Ensure the txz always has the full version name
if (txzPath !== targetPath) {
console.log(`Ensuring TXZ has correct name: ${txzPath} -> ${targetPath}`);
await rename(txzPath, targetPath);
} else {
console.log(`TXZ file already has correct name: ${txzPath}`);
}
};
function updateEntityValue(
@@ -50,7 +58,10 @@ const buildPlugin = async ({
tag,
txzSha256,
releaseNotes,
apiVersion,
}: PluginEnv) => {
console.log(`API version: ${apiVersion}`);
// Update plg file
let plgContent = await readFile(getRootPluginPath({ startingDir }), "utf8");
@@ -58,12 +69,16 @@ const buildPlugin = async ({
const entities: Record<string, string> = {
name: pluginName,
version: pluginVersion,
pluginURL: getPluginUrl({ baseUrl, tag }),
MAIN_TXZ: getMainTxzUrl({ baseUrl, pluginVersion, tag }),
TXZ_SHA256: txzSha256,
VENDOR_STORE_URL: getAssetUrl({ baseUrl, tag }, getVendorBundleName()),
VENDOR_STORE_FILENAME: getVendorBundleName(),
...(tag ? { TAG: tag } : {}),
api_version: apiVersion,
arch: defaultArch,
build: defaultBuild,
plugin_url: getPluginUrl({ baseUrl, tag }),
txz_url: getMainTxzUrl({ baseUrl, apiVersion, tag }),
txz_sha256: txzSha256,
txz_name: getTxzName(apiVersion),
vendor_store_url: getAssetUrl({ baseUrl, tag }, getVendorBundleName(apiVersion)),
vendor_store_filename: getVendorBundleName(apiVersion),
...(tag ? { tag } : {}),
};
console.log("Entities:", entities);
@@ -107,8 +122,8 @@ const main = async () => {
await cleanupPluginFiles();
await buildPlugin(validatedEnv);
await moveTxzFile(validatedEnv.txzPath, validatedEnv.pluginVersion);
await bundleVendorStore();
await moveTxzFile(validatedEnv);
await bundleVendorStore(validatedEnv.apiVersion);
} catch (error) {
console.error(error);
process.exit(1);

View File

@@ -1,11 +1,16 @@
import { join } from "path";
import { $, cd } from "zx";
import { existsSync } from "node:fs";
import { readdir } from "node:fs/promises";
import { readdir, writeFile } from "node:fs/promises";
import { getTxzName, pluginName, startingDir } from "./utils/consts";
import { ensureNodeJs } from "./utils/nodejs-helper";
import { setupTxzEnv, TxzEnv } from "./cli/setup-txz-environment";
import { cleanupTxzFiles } from "./utils/cleanup";
import { apiDir } from "./utils/paths";
import { getVendorBundleName, getVendorFullPath } from "./build-vendor-store";
import { getAssetUrl } from "./utils/bucket-urls";
// Recursively search for manifest files
const findManifestFiles = async (dir: string): Promise<string[]> => {
@@ -40,6 +45,59 @@ const findManifestFiles = async (dir: string): Promise<string[]> => {
}
};
// Function to store vendor archive information in a recoverable location
const storeVendorArchiveInfo = async (version: string, vendorUrl: string, vendorFilename: string) => {
  try {
    if (!version || !vendorUrl || !vendorFilename) {
      throw new Error("Cannot store vendor archive info: Missing required parameters");
    }

    // Create a config directory in the source tree
    const configDir = join(
      startingDir,
      "source",
      "dynamix.unraid.net",
      "usr",
      "local",
      "share",
      "dynamix.unraid.net",
      "config"
    );

    // Ensure directory exists
    await $`mkdir -p ${configDir}`;

    // Get the full path for vendor archive
    const vendorFullPath = getVendorFullPath(version);

    // Create a JSON config file with vendor information
    const configData = {
      vendor_store_url: vendorUrl,
      vendor_store_path: vendorFullPath,
      api_version: version
    };

    // Validate all fields are present (guards against empty strings slipping through)
    Object.entries(configData).forEach(([key, value]) => {
      if (!value) {
        throw new Error(`Cannot store vendor archive info: Missing value for ${key}`);
      }
    });

    const configPath = join(configDir, "vendor_archive.json");
    await writeFile(configPath, JSON.stringify(configData, null, 2));

    console.log(`Vendor archive information stored in ${configPath}`);
    console.log(`API Version: ${version}`);
    console.log(`Vendor URL: ${vendorUrl}`);
    console.log(`Vendor Full Path: ${vendorFullPath}`);

    return true;
  } catch (error) {
    // Under strict TS the catch variable is `unknown`; narrow before reading `.message`.
    const message = error instanceof Error ? error.message : String(error);
    console.error(`Failed to store vendor archive information: ${message}`);
    throw error; // Re-throw to prevent build from succeeding with invalid vendor info
  }
};
const validateSourceDir = async (validatedEnv: TxzEnv) => {
if (!validatedEnv.ci) {
console.log("Validating TXZ source directory");
@@ -70,8 +128,15 @@ const validateSourceDir = async (validatedEnv: TxzEnv) => {
if (!hasManifest || !hasUiManifest) {
console.log("Existing Manifest Files:", manifestFiles);
const missingFiles: string[] = [];
if (!hasManifest) missingFiles.push("manifest.json");
if (!hasUiManifest) missingFiles.push("ui.manifest.json");
throw new Error(
`Webcomponents must contain both "ui.manifest.json" and "manifest.json" - be sure to have run pnpm build:wc in unraid-ui`
`Webcomponents missing required file(s): ${missingFiles.join(", ")} - ` +
`${!hasUiManifest ? "run 'pnpm build:wc' in unraid-ui for ui.manifest.json" : ""}` +
`${!hasManifest && !hasUiManifest ? " and " : ""}` +
`${!hasManifest ? "run 'pnpm build' in web for manifest.json" : ""}`
);
}
@@ -88,16 +153,37 @@ const validateSourceDir = async (validatedEnv: TxzEnv) => {
const buildTxz = async (validatedEnv: TxzEnv) => {
await validateSourceDir(validatedEnv);
const txzPath = join(validatedEnv.txzOutputDir, getTxzName());
// Use version from validated environment
const version = validatedEnv.apiVersion;
// Always use version when getting txz name
const txzName = getTxzName(version);
console.log(`Package name: ${txzName}`);
const txzPath = join(validatedEnv.txzOutputDir, txzName);
// Use the getVendorBundleName function for consistent naming
const vendorFilename = getVendorBundleName(version);
// Use the baseUrl and tag from validatedEnv, consistent with build-plugin.ts
const vendorUrl = getAssetUrl({
baseUrl: validatedEnv.baseUrl,
tag: validatedEnv.tag
}, vendorFilename);
console.log(`Storing vendor archive information: ${vendorUrl} -> ${vendorFilename}`);
await storeVendorArchiveInfo(version, vendorUrl, vendorFilename);
await ensureNodeJs();
// Create package - must be run from within the pre-pack directory
// Use cd option to run command from prePackDir
await cd(join(startingDir, "source", pluginName));
$.verbose = true;
// Create the package using the default package name
await $`${join(startingDir, "scripts/makepkg")} --chown y --compress -${
validatedEnv.compress
} --linkadd y ${txzPath}`;
} --linkadd n ${txzPath}`;
$.verbose = false;
await cd(startingDir);
};

View File

@@ -1,42 +1,103 @@
import { apiDir, deployDir } from "./utils/paths";
import { deployDir, vendorStorePath } from "./utils/paths";
import { join } from "path";
import { readFileSync } from "node:fs";
import { existsSync, mkdirSync } from "node:fs";
import { startingDir } from "./utils/consts";
import { copyFile } from "node:fs/promises";
/**
* Get the version of the API from the package.json file
*
* Throws if package.json is not found or is invalid JSON.
* @returns The version of the API
*/
function getVersion(): string {
const packageJsonPath = join(apiDir, "package.json");
const packageJsonString = readFileSync(packageJsonPath, "utf8");
const packageJson = JSON.parse(packageJsonString);
return packageJson.version;
}
import { copyFile, stat } from "node:fs/promises";
import { execSync } from "child_process";
/**
* The name of the node_modules archive that will be vendored with the plugin.
* @param version API version to use in the filename
* @returns The name of the node_modules bundle file
*/
export function getVendorBundleName(): string {
const version = getVersion();
export function getVendorBundleName(version: string): string {
  // Versioned filename keeps archives for different API releases distinct.
  return "node_modules-for-v" + version + ".tar.xz";
}
/**
* Get the full path where the vendor bundle should be stored
* @param version API version to use in the filename
* @returns The full path to the vendor bundle
*/
export function getVendorFullPath(version: string): string {
  // Resolve the versioned bundle filename against the flash-drive vendor store.
  const bundleName = getVendorBundleName(version);
  return join(vendorStorePath, bundleName);
}
/**
* Create a tarball of the node_modules for local development
* @param outputPath Path to write the tarball to
*/
async function createNodeModulesTarball(outputPath: string): Promise<void> {
  console.log(`Creating node_modules tarball at ${outputPath}`);
  try {
    // The API checkout is expected to be a sibling of the current working directory.
    const apiRoot = join(process.cwd(), "..", "api");
    const apiNodeModules = join(apiRoot, "node_modules");
    if (!existsSync(apiNodeModules)) {
      throw new Error(`API node_modules not found at ${apiNodeModules}`);
    }
    console.log(`Found API node_modules at ${apiNodeModules}, creating tarball...`);
    // -C keeps archive paths relative to the API root (archive contains "node_modules/...")
    execSync(`tar -cJf "${outputPath}" -C "${apiRoot}" node_modules`);
    console.log(`Successfully created node_modules tarball at ${outputPath}`);
  } catch (error) {
    console.error(`Failed to create node_modules tarball: ${error}`);
    throw error;
  }
}
/**
* Prepare a versioned bundle of the API's node_modules to vendor dependencies.
*
* It expects a generic `packed-node-modules.tar.xz` archive to be available in the `startingDir`.
* It copies this archive to the `deployDir` directory and adds a version to the filename.
* It does not actually create the packed node_modules archive; that is done inside the API's build script.
* It first tries to use the `packed-node-modules.tar.xz` from the mounted volume.
* If that fails, it checks the parent API directory and tries to create a tarball from node_modules.
*
* After this operation, the vendored node_modules will be available inside the `deployDir`.
*
* @param apiVersion Required API version to use for the vendor bundle
*/
export async function bundleVendorStore(): Promise<void> {
const storeArchive = join(startingDir, "packed-node-modules.tar.xz");
const vendorStoreTarPath = join(deployDir, getVendorBundleName());
await copyFile(storeArchive, vendorStoreTarPath);
export async function bundleVendorStore(apiVersion: string): Promise<void> {
  // Ensure deploy directory exists
  mkdirSync(deployDir, { recursive: true });

  const vendorStoreTarPath = join(deployDir, getVendorBundleName(apiVersion));

  // Possible locations for the node modules archive
  const possibleLocations = [
    join(startingDir, "node-modules-archive/packed-node-modules.tar.xz"), // Docker mount
    join(process.cwd(), "..", "api", "deploy", "node-modules-archive", "packed-node-modules.tar.xz") // Direct path to API deploy
  ];

  let foundArchive = false;
  for (const archivePath of possibleLocations) {
    try {
      console.log(`Checking for vendor store at ${archivePath}`);
      if (!existsSync(archivePath)) {
        console.log(`Archive not found at ${archivePath}`);
        continue;
      }
      const stats = await stat(archivePath);
      if (!stats.isFile()) {
        console.log(`${archivePath} exists but is not a file`);
        continue;
      }
      console.log(`Copying vendor store from ${archivePath} to ${vendorStoreTarPath}`);
      await copyFile(archivePath, vendorStoreTarPath);
      console.log(`Successfully copied vendor store to ${vendorStoreTarPath}`);
      foundArchive = true;
      break;
    } catch (error) {
      console.log(`Error checking ${archivePath}: ${error}`);
    }
  }

  if (!foundArchive) {
    console.log("Could not find existing node_modules archive, attempting to create one");
    // Write the tarball directly at its final destination. The previous
    // temp-file + copy approach left temp-node-modules.tar.xz behind in deployDir.
    await createNodeModulesTarball(vendorStoreTarPath);
  }
}

View File

@@ -0,0 +1,38 @@
import { Command } from "commander";
import { z } from "zod";
/**
* Common base environment fields shared between different build setups
*/
export const baseEnvSchema = z.object({
  // True when running in CI (set via --ci flag or CI env var); defaults to false.
  ci: z.boolean().optional().default(false),
  // API version string used for package/archive naming; required.
  apiVersion: z.string(),
  // Must be a valid URL; base for plugin/asset download links.
  baseUrl: z.string().url(),
  // Optional tag for PR/staging builds; empty string means an untagged build.
  tag: z.string().optional().default(''),
});
export type BaseEnv = z.infer<typeof baseEnvSchema>;
/**
* Generate a default base URL for local development
*/
export const getDefaultBaseUrl = (): string => {
  // CI builds must supply --base-url explicitly; the placeholder string will
  // fail the URL validation in baseEnvSchema if a caller forgets to.
  if (process.env.CI === "true") {
    return "This is a CI build, please set the base URL manually";
  }
  // Local development: serve assets from the host LAN IP (or localhost) on port 5858.
  const host = process.env.HOST_LAN_IP || 'localhost';
  return `http://${host}:5858`;
};
/**
* Common CLI options shared across different command setups
*/
export const addCommonOptions = (program: Command) => {
  // Registration order determines --help output order; keep ci first.
  return program
    .option("--ci", "CI mode", process.env.CI === "true")
    // Required: falls back to API_VERSION env var as the default value.
    .requiredOption("--api-version <version>", "API version", process.env.API_VERSION)
    .requiredOption(
      "--base-url <url>",
      "Base URL for assets",
      // Locally this yields a usable http URL; in CI it is a placeholder
      // that forces the caller to pass --base-url explicitly.
      getDefaultBaseUrl()
    )
    .option("--tag <tag>", "Tag (used for PR and staging builds)", process.env.TAG);
};

View File

@@ -4,12 +4,11 @@ import { Command } from "commander";
import { getStagingChangelogFromGit } from "../utils/changelog";
import { createHash } from "node:crypto";
import { getTxzPath } from "../utils/paths";
import { existsSync } from "node:fs";
import { join } from "node:path";
import { baseEnvSchema, addCommonOptions } from "./common-environment";
const safeParseEnvSchema = z.object({
ci: z.boolean().optional(),
baseUrl: z.string().url(),
tag: z.string().optional().default(''),
const safeParseEnvSchema = baseEnvSchema.extend({
txzPath: z.string().refine((val) => val.endsWith(".txz"), {
message: "TXZ Path must end with .txz",
}),
@@ -28,6 +27,85 @@ const pluginEnvSchema = safeParseEnvSchema.extend({
export type PluginEnv = z.infer<typeof pluginEnvSchema>;
/**
* Resolves the txz path, trying multiple possible locations based on apiVersion
* Also verifies the file exists and is accessible
* @param txzPath Initial txz path to check
* @param apiVersion API version to use for alternative path
* @param isCi Whether we're running in CI
* @returns Object containing the resolved txz path and SHA256 hash
* @throws Error if no valid txz file can be found
*/
export const resolveTxzPath = async (txzPath: string, apiVersion: string, isCi?: boolean): Promise<{path: string, sha256: string}> => {
if (existsSync(txzPath)) {
await access(txzPath, constants.F_OK);
console.log("Reading txz file from:", txzPath);
const txzFile = await readFile(txzPath);
if (!txzFile || txzFile.length === 0) {
throw new Error(`TXZ file is empty: ${txzPath}`);
}
return {
path: txzPath,
sha256: getSha256(txzFile)
};
}
console.log(`TXZ path not found at: ${txzPath}`);
console.log(`Attempting to find TXZ using apiVersion: ${apiVersion}`);
// Try different formats of generated TXZ name
const deployDir = join(process.cwd(), "deploy");
// Try with exact apiVersion format
const alternativePaths = [
join(deployDir, `dynamix.unraid.net-${apiVersion}-x86_64-1.txz`),
];
// In CI, we sometimes see unusual filenames, so try a glob-like approach
if (isCi) {
console.log("Checking for possible TXZ files in deploy directory");
try {
// Using node's filesystem APIs to scan the directory
const fs = require('fs');
const deployFiles = fs.readdirSync(deployDir);
// Find any txz file that contains the apiVersion
for (const file of deployFiles) {
if (file.endsWith('.txz') &&
file.includes('dynamix.unraid.net') &&
file.includes(apiVersion.split('+')[0])) {
alternativePaths.push(join(deployDir, file));
}
}
} catch (error) {
console.log(`Error scanning deploy directory: ${error}`);
}
}
// Check each path
for (const path of alternativePaths) {
if (existsSync(path)) {
console.log(`Found TXZ at: ${path}`);
await access(path, constants.F_OK);
console.log("Reading txz file from:", path);
const txzFile = await readFile(path);
if (!txzFile || txzFile.length === 0) {
console.log(`TXZ file is empty: ${path}, trying next alternative`);
continue;
}
return {
path,
sha256: getSha256(txzFile)
};
}
console.log(`Could not find TXZ at: ${path}`);
}
// If we get here, we couldn't find a valid txz file
throw new Error(`Could not find any valid TXZ file. Tried original path: ${txzPath} and alternatives.`);
};
export const validatePluginEnv = async (
envArgs: Record<string, any>
): Promise<PluginEnv> => {
@@ -48,15 +126,10 @@ export const validatePluginEnv = async (
: await getStagingChangelogFromGit(safeEnv);
}
if (safeEnv.txzPath) {
await access(safeEnv.txzPath, constants.F_OK);
console.log("Reading txz file from:", safeEnv.txzPath);
const txzFile = await readFile(safeEnv.txzPath);
if (!txzFile || txzFile.length === 0) {
throw new Error(`TXZ Path is empty: ${safeEnv.txzPath}`);
}
envArgs.txzSha256 = getSha256(txzFile);
}
// Resolve and validate the txz path
const { path, sha256 } = await resolveTxzPath(safeEnv.txzPath, safeEnv.apiVersion, safeEnv.ci);
envArgs.txzPath = path;
envArgs.txzSha256 = sha256;
const validatedEnv = pluginEnvSchema.parse(envArgs);
@@ -87,27 +160,22 @@ export const setupPluginEnv = async (argv: string[]): Promise<PluginEnv> => {
// CLI setup for plugin environment
const program = new Command();
// Add common options
addCommonOptions(program);
// Add plugin-specific options
program
.requiredOption(
"--base-url <url>",
"Base URL - will be used to determine the bucket, and combined with the tag (if set) to form the final URL",
process.env.CI === "true"
? "This is a CI build, please set the base URL manually"
: `http://${process.env.HOST_LAN_IP}:5858`
)
.option(
"--txz-path <path>",
"Path to built package, will be used to generate the SHA256 and renamed with the plugin version",
getTxzPath({ startingDir: process.cwd() })
getTxzPath({ startingDir: process.cwd(), pluginVersion: process.env.API_VERSION })
)
.option(
"--plugin-version <version>",
"Plugin Version in the format YYYY.MM.DD.HHMM",
getPluginVersion()
)
.option("--tag <tag>", "Tag (used for PR and staging builds)", process.env.TAG)
.option("--release-notes-path <path>", "Path to release notes file")
.option("--ci", "CI mode", process.env.CI === "true")
.parse(argv);
const options = program.opts();

View File

@@ -3,9 +3,9 @@ import { z } from "zod";
import { Command } from "commander";
import { startingDir } from "../utils/consts";
import { deployDir } from "../utils/paths";
import { baseEnvSchema, addCommonOptions } from "./common-environment";
const txzEnvSchema = z.object({
ci: z.boolean().optional().default(false),
const txzEnvSchema = baseEnvSchema.extend({
skipValidation: z
.string()
.optional()
@@ -33,11 +33,13 @@ export const setupTxzEnv = async (argv: string[]): Promise<TxzEnv> => {
// CLI setup for TXZ environment
const program = new Command();
// Add common options first
addCommonOptions(program);
// Add TXZ-specific options
program
.option("--skip-validation", "Skip validation", "false")
.option("--ci", "CI mode", process.env.CI === "true")
.option("--compress, -z", "Compress level", "1")
.parse(argv);
const options = program.opts();

View File

@@ -1,4 +1,4 @@
import { getTxzName, LOCAL_BUILD_TAG, pluginNameWithExt } from "./consts";
import { getTxzName, LOCAL_BUILD_TAG, pluginNameWithExt, defaultArch, defaultBuild } from "./consts";
// Define a common interface for URL parameters
interface UrlParams {
@@ -7,7 +7,7 @@ interface UrlParams {
}
interface TxzUrlParams extends UrlParams {
pluginVersion: string;
apiVersion: string;
}
/**
@@ -44,7 +44,7 @@ export const getPluginUrl = (params: UrlParams): string =>
/**
* Get the URL for the main TXZ file
* ex. returns = BASE_URL/TAG/dynamix.unraid.net-4.1.3.txz
* ex. returns = BASE_URL/TAG/dynamix.unraid.net-4.1.3-x86_64-1.txz
*/
export const getMainTxzUrl = (params: TxzUrlParams): string =>
getAssetUrl(params, getTxzName(params.pluginVersion));
getAssetUrl(params, getTxzName(params.apiVersion, defaultArch, defaultBuild));

View File

@@ -1,8 +1,13 @@
export const pluginName = "dynamix.unraid.net" as const;
export const pluginNameWithExt = `${pluginName}.plg` as const;
export const getTxzName = (version?: string) =>
version ? `${pluginName}-${version}.txz` : `${pluginName}.txz`;
// Default architecture and build number for Slackware package
export const defaultArch = "x86_64" as const;
export const defaultBuild = "1" as const;

// Get the txz name following Slackware naming convention: name-version-arch-build.txz
// Without a version, fall back to the bare unversioned package name.
export const getTxzName = (version?: string, arch: string = defaultArch, build: string = defaultBuild) => {
  if (!version) {
    return `${pluginName}.txz`;
  }
  return `${pluginName}-${version}-${arch}-${build}.txz`;
};
export const startingDir = process.cwd();
export const BASE_URLS = {

View File

@@ -0,0 +1,59 @@
import { join } from "path";
import { existsSync, mkdirSync, createWriteStream, readFileSync } from "fs";
import { writeFile, readFile } from "fs/promises";
import { get } from "https";
import { $ } from "zx";
import { startingDir } from "./consts";
// Locate the .nvmrc pinning the Node.js version, preferring the parent
// (monorepo root) over the plugin directory itself.
const findNvmrc = () => {
  const candidates = [
    join(startingDir, "..", ".nvmrc"),
    join(startingDir, ".nvmrc"),
  ];
  const found = candidates.find((candidate) => existsSync(candidate));
  if (!found) {
    throw new Error("NVMRC file not found");
  }
  return found;
}
// Read Node.js version from .nvmrc
const NVMRC_PATH = findNvmrc();
console.log(`NVMRC_PATH: ${NVMRC_PATH}`);
const NODE_VERSION = readFileSync(NVMRC_PATH, "utf8").trim();
const NODE_FILENAME = `node-v${NODE_VERSION}-linux-x64.tar.xz`;
const NODE_URL = `https://nodejs.org/download/release/v${NODE_VERSION}/${NODE_FILENAME}`;
const NODE_DEST = join(startingDir, "source", "dynamix.unraid.net", "usr", "local");
const NODE_VERSION_FILE = join(NODE_DEST, ".node-version");
/**
 * Download a file over HTTPS to a local destination.
 *
 * Rejects on non-200 status, request errors, or write-stream errors; the
 * destination stream is always closed so the file handle is released.
 * NOTE(review): does not follow redirects — assumes nodejs.org serves the
 * release tarball with a direct 200 response.
 *
 * @param url Source URL to fetch
 * @param dest Local path to write (relative paths resolve against cwd)
 */
async function fetchFile(url: string, dest: string) {
  return new Promise<void>((resolve, reject) => {
    const file = createWriteStream(dest);
    const request = get(url, (response) => {
      if (response.statusCode !== 200) {
        // Drain the unused response so the socket is released, and close
        // the (empty) destination stream before rejecting.
        response.resume();
        file.close();
        reject(new Error(`Failed to get '${url}' (${response.statusCode})`));
        return;
      }
      response.pipe(file);
      file.on("finish", () => file.close(() => resolve()));
      file.on("error", (err) => {
        file.close();
        reject(err);
      });
    });
    request.on("error", (err) => {
      // Request-level failure (DNS, TLS, reset): close the write stream too.
      file.close();
      reject(err);
    });
  });
}
// Ensure the Node.js runtime pinned in .nvmrc is present in the plugin
// source tree (NODE_DEST). Skips download/extract when the recorded
// .node-version marker already matches NODE_VERSION.
export async function ensureNodeJs() {
  // Version previously extracted into NODE_DEST, if any.
  let currentVersion: string | null = null;
  if (existsSync(NODE_VERSION_FILE)) {
    currentVersion = (await readFile(NODE_VERSION_FILE, "utf8")).trim();
  }

  if (currentVersion !== NODE_VERSION) {
    mkdirSync(NODE_DEST, { recursive: true });
    // NODE_FILENAME is a bare filename, so the tarball is cached in the
    // current working directory; re-downloaded only when missing.
    if (!existsSync(NODE_FILENAME)) {
      await fetchFile(NODE_URL, NODE_FILENAME);
    }
    // Extract Node.js excluding include/node and share/doc/node directories
    await $`tar --strip-components=1 -xf ${NODE_FILENAME} --exclude="*/include/node" --exclude="*/share/doc/node" --exclude="*/README.md" --exclude="*/LICENSE" --exclude="*/CHANGELOG.md" -C ${NODE_DEST}`;
    // Record the installed version so subsequent builds can short-circuit.
    await writeFile(NODE_VERSION_FILE, NODE_VERSION, "utf8");
  }
}

View File

@@ -25,6 +25,8 @@ export const apiDir = join(
"unraid-api"
);
export const vendorStorePath = "/boot/config/plugins/dynamix.my.servers";
/**
* Get the path to the root plugin directory
* @param startingDir - The starting directory

View File

@@ -7,15 +7,17 @@ services:
- ./:/app
- /app/node_modules
- ../.git:/app/.git
- ../.nvmrc:/app/.nvmrc
- ./source:/app/source
- ./scripts:/app/scripts
- ../unraid-ui/dist-wc:/app/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
- ../web/.nuxt/nuxt-custom-elements/dist/unraid-components:/app/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
- ../api/deploy/pack/:/app/source/dynamix.unraid.net/usr/local/unraid-api
- ../api/deploy/packed-node-modules.tar.xz:/app/packed-node-modules.tar.xz
- ../api/deploy/node-modules-archive:/app/node-modules-archive
stdin_open: true # equivalent to -i
tty: true # equivalent to -t
environment:
- HOST_LAN_IP=${HOST_LAN_IP}
- CI=${CI:-false}
- TAG=${TAG}
- API_VERSION=${API_VERSION}

View File

@@ -1,15 +0,0 @@
# Hidden Flags
Use the following flags for additional functionality
1. The deleteOnUninstall setting is for internal developers who are switching between the staging and production plugins, or otherwise installing/uninstalling the plugin a lot. Setting this to "no" prevents the uninstall routine from deleting your local flash backup files and disabling Remote Access. The assumption is that you will be reinstalling the plugin and don't want to lose those settings.
```
[plugin]
deleteOnUninstall="no"
```
# Plugin Hosted Urls
- Main: https://s3.amazonaws.com/dnld.lime-technology.com/unraid-api/dynamix.unraid.net.plg
- Staging: https://s3.amazonaws.com/dnld.lime-technology.com/unraid-api/dynamix.unraid.net.staging.plg

View File

@@ -12,7 +12,7 @@
"tsx": "^4.19.2",
"zod": "^3.24.1",
"zx": "^8.3.2"
},
},
"type": "module",
"license": "GPL-2.0-or-later",
"scripts": {
@@ -21,7 +21,7 @@
"build:txz": "tsx builder/build-txz.ts",
"build:plugin": "tsx builder/build-plugin.ts",
"build:validate": "npm run env:validate && npm run build",
"build:watcher": "nodemon --verbose --watch 'source/**/*' --watch 'plugins/dynamix.unraid.net.plg' --ext ts,js,plg --ignore '*.test.ts' --ignore 'node_modules/**' --ignore 'source/dynamix.unraid.net/install/**' --delay 5s --exec 'pnpm run build'",
"build:watcher": "nodemon --verbose --watch 'source/**/*' --watch 'plugins/dynamix.unraid.net.plg' --ext ts,js,plg,sh --ignore '*.test.ts' --ignore 'node_modules/**' --delay 5s --exec 'pnpm run build'",
"// Docker commands": "",
"build:watch": "./scripts/dc.sh pnpm run build:watcher",
"docker:build": "docker compose build",

View File

@@ -1,39 +1,23 @@
<?xml version='1.0' standalone='yes'?>
<!DOCTYPE PLUGIN [
<!ENTITY name "">
<!ENTITY name "dynamix.unraid.net">
<!ENTITY launch "Connect">
<!ENTITY author "limetech">
<!ENTITY version "">
<!ENTITY pluginURL "">
<!ENTITY source "/boot/config/plugins/dynamix.my.servers/&name;">
<!ENTITY TXZ_SHA256 "">
<!-- Node.js Runtime. Required to run the Unraid API. -->
<!ENTITY NODEJS_VERSION "22.14.0">
<!-- Version is omitted from filename, so we don't need to search/delete other versions when updating the plugin. -->
<!ENTITY NODEJS_FILENAME "node-linux-x64.tar.xz">
<!-- To get SHA256:
wget https://nodejs.org/download/release/v22.14.0/node-v22.14.0-linux-x64.tar.xz
sha256sum node-v22.14.0-linux-x64.tar.xz
-->
<!ENTITY NODEJS_SHA256 "69b09dba5c8dcb05c4e4273a4340db1005abeafe3927efda2bc5b249e80437ec">
<!ENTITY NODEJS_TXZ "https://nodejs.org/download/release/v&NODEJS_VERSION;/node-v&NODEJS_VERSION;-linux-x64.tar.xz">
<!ENTITY MAIN_TXZ "">
<!-- PNPM package manager for Node.js. Decouples dependencies from MAIN_TXZ. Prevents supply chain attacks. -->
<!-- PNPM_BINARY is the filename of the binary on the boot drive. (In)validated via SHA256. -->
<!ENTITY PNPM_BINARY "/boot/config/plugins/dynamix.my.servers/pnpm-linuxstatic-x64">
<!ENTITY PNPM_BINARY_URL "https://github.com/pnpm/pnpm/releases/download/v10.7.0/pnpm-linuxstatic-x64">
<!ENTITY PNPM_BINARY_SHA256 "714f4c21b63f47ed415f2e59f4bf5c699aa4f58b4d88e15ce6c66cda5631ebb2">
<!-- VENDOR_STORE_URL points to an XZ tarball of vendored dependencies (i.e. global pnpm store), specific to the plugin version.
This archive may be updated after installation (e.g. when adding api plugins), so we don't verify its hash.
It is replaced only when the plugin/api is updated. -->
<!ENTITY VENDOR_STORE_URL "">
<!-- The archive's filename on the boot drive. Enables reproducible offline installs of the Unraid API. -->
<!ENTITY VENDOR_STORE_FILENAME "">
<!ENTITY TAG "">
<!ENTITY NODE_DIR "/usr/libexec/node">
<!ENTITY plugin_url "">
<!ENTITY source "/boot/config/plugins/dynamix.my.servers/&txz_name;">
<!ENTITY txz_sha256 "">
<!ENTITY txz_url "">
<!ENTITY txz_name "">
<!ENTITY vendor_store_url "">
<!ENTITY vendor_store_filename "">
<!ENTITY arch "x86_64">
<!ENTITY build "1">
<!ENTITY tag "">
<!ENTITY api_version "">
]>
<PLUGIN name="&name;" author="&author;" version="&version;" pluginURL="&pluginURL;"
<PLUGIN name="&name;" author="&author;" version="&version;" pluginURL="&plugin_url;"
launch="&launch;" min="6.9.0-rc1" icon="globe">
<CHANGES>
@@ -41,26 +25,7 @@
- initial release
</CHANGES>
<!-- prevent prod plugin from installing when staging already installed, and vice versa -->
<FILE Run="/bin/bash" Method="install">
<INLINE>
name="&name;" version="&version;" pluginURL="&pluginURL;"
<![CDATA[
echo "Installing ${name}.plg ${version}"
if [ -f /boot/config/plugins/dynamix.unraid.net.staging.plg ]; then
echo "ERROR: Cannot proceed with installation"
echo "Reason: Staging Unraid Connect plugin detected at /boot/config/plugins/dynamix.unraid.net.staging.plg"
echo "Action required: Please uninstall the existing Unraid Connect Staging plugin first"
echo "How to fix: Navigate to Plugins > Installed Plugins in the Unraid web GUI and remove the staging plugin"
echo "Use this URL to reinstall this plugin: ${pluginURL}"
exit 1
fi
exit 0
]]>
</INLINE>
</FILE>
<!-- gzip check, DNS check, then validate files before doing anything destructive -->
<!-- Check disk space before installation -->
<FILE Run="/bin/bash" Method="install">
<INLINE>
<![CDATA[
@@ -78,117 +43,22 @@ if [ "$FREE_SPACE" -lt 300 ]; then
fi
echo "ok. (${FREE_SPACE}MB free)"
version=
# shellcheck disable=SC1091
source /etc/unraid-version
# ensure gzip is available and executes, is needed by '/etc/rc.d/rc.unraid-api install'
if [ ! -x /bin/gzip ] || ! /bin/gzip -V &>/dev/null; then
echo "⚠️ Unable to install as gzip is not available on this system. For help, post your diagnostics.zip to a new support thread in the forum."
exit 1
fi
dnscheck() {
HOST=$1
if [ -x /usr/bin/host ] && ! /usr/bin/host -W 10 "${HOST}" &>/dev/null; then
echo "⚠️ Warning: Your DNS server (${DNS_SERVER1}) is unable to resolve '${HOST}'"
DNSERR=yes
fi
}
# shellcheck disable=SC1090
source <(grep "DNS_SERVER1" /usr/local/emhttp/state/network.ini 2>/dev/null)
DNSERR=no
echo "Checking DNS..."
dnscheck "mothership.unraid.net"
[[ "${DNSERR}" == "yes" && "${DNS_SERVER1}" != "8.8.8.8" ]] && echo " Recommend navigating to Settings -> Network Settings and changing your DNS server to 8.8.8.8"
# Note: DNS checks will fail if the network is not available at boot. Cannot exit the install when DNS checks fail.
exit 0
]]>
</INLINE>
</FILE>
<FILE Name="/boot/config/plugins/dynamix.my.servers/&NODEJS_FILENAME;">
<URL>&NODEJS_TXZ;</URL>
<SHA256>&NODEJS_SHA256;</SHA256>
</FILE>
<FILE Name="&PNPM_BINARY;">
<URL>&PNPM_BINARY_URL;</URL>
<SHA256>&PNPM_BINARY_SHA256;</SHA256>
</FILE>
<FILE Name="/boot/config/plugins/dynamix.my.servers/&VENDOR_STORE_FILENAME;">
<URL>&VENDOR_STORE_URL;</URL>
</FILE>
<FILE Run="/bin/bash" Method="install">
<INLINE>
NODE_FILE="&NODEJS_FILENAME;"
VENDOR_ARCHIVE="&VENDOR_STORE_FILENAME;"
NODE_DIR="&NODE_DIR;"
<![CDATA[
# Check if the Node.js archive exists
if [[ ! -f "/boot/config/plugins/dynamix.my.servers/${NODE_FILE}" ]]; then
echo "Node.js archive not found at /boot/config/plugins/dynamix.my.servers/${NODE_FILE}"
exit 1
fi
# Perform a dry run to verify the archive is valid
if ! tar --strip-components=1 -tf "/boot/config/plugins/dynamix.my.servers/${NODE_FILE}" > /dev/null; then
echo "Node.js archive is corrupt or invalid"
exit 1
fi
# Create the target directory if it doesn't exist
mkdir -p "${NODE_DIR}" || { echo "Failed to create ${NODE_DIR}"; exit 1; }
# Extract the archive to the target directory
if ! tar --strip-components=1 -xf "/boot/config/plugins/dynamix.my.servers/${NODE_FILE}" -C "${NODE_DIR}"; then
echo "Failed to extract Node.js archive"
exit 1
fi
# Remove all Node.js archives from the flash drive that do not match the expected version
# deprecated Apr 2025. kept to remove unused archives for users upgrading from versioned node downloads.
find /boot/config/plugins/dynamix.my.servers/ -name "node-v*-linux-x64.tar.xz" ! -name "${NODE_FILE}" -delete
# Remove stale pnpm store and node_modules archives from the boot drive
find /boot/config/plugins/dynamix.my.servers/ -name "pnpm-store-for-v*.txz" ! -name "${VENDOR_ARCHIVE}" -delete
find /boot/config/plugins/dynamix.my.servers/ -name "node_modules-for-v*.tar.xz" ! -name "${VENDOR_ARCHIVE}" -delete
# Remove the legacy node directory
rm -rf /usr/local/node
echo "Node.js installation successful"
exit 0
]]>
</INLINE>
<FILE Name="/boot/config/plugins/dynamix.my.servers/&vendor_store_filename;">
<URL>&vendor_store_url;</URL>
</FILE>
<!-- download main txz -->
<FILE Name="&source;.txz">
<URL>&MAIN_TXZ;</URL>
<SHA256>&TXZ_SHA256;</SHA256>
<FILE Name="&source;">
<URL>&txz_url;</URL>
<SHA256>&txz_sha256;</SHA256>
</FILE>
<FILE Run="/bin/bash" Method="remove">
<INLINE>
<![CDATA[
version=
# shellcheck disable=SC1091
source /etc/unraid-version
# Run cleanup operations
echo "Performing cleanup operations..."
/usr/bin/php /usr/local/emhttp/plugins/dynamix.my.servers/scripts/cleanup_operations.php
exit 0
]]>
</INLINE>
</FILE>
<!-- Cleanup for install on unsupported OS -->
<!-- Check for compatible Unraid version -->
<FILE Run="/usr/bin/php" Method="install">
<INLINE>
<![CDATA[
@@ -202,49 +72,75 @@ $version = @parse_ini_file('/etc/unraid-version', true)['version'];
$is_stable_6_12_or_higher = version_compare($version, '6.12.0', '>=') && !preg_match('/^6\\.12\\.0-/', $version);
if ($is_stable_6_12_or_higher) {
echo "Running on supported version {$version}, skipping cleanup\n";
echo "Running on supported version {$version}\n";
exit(0);
}
echo "Running on unsupported version {$version}, performing cleanup\n";
echo "Running cleanup operations...\n";
include_once("/usr/local/emhttp/plugins/dynamix.my.servers/scripts/cleanup_operations.php");
echo "Warning: Unsupported Unraid version {$version}. This plugin requires Unraid 6.12.0 or higher.\n";
echo "The plugin may not function correctly on this system.\n";
exit(0);
]]>
</INLINE>
</FILE>
<!-- uninstall existing plugin during update or removal -->
<FILE Run="/bin/bash" Method="install remove">
<!-- Backup files before installation -->
<FILE Run="/bin/bash" Method="install">
<INLINE>
<![CDATA[
echo "Uninstalling existing plugin"
version=
# shellcheck disable=SC1091
source /etc/unraid-version
<![CDATA[
echo "Backing up original files..."
if [ -e /etc/rc.d/rc.unraid-api ]; then
touch /tmp/restore-files-dynamix-unraid-net
# stop flash backup
/etc/rc.d/rc.flash_backup stop &>/dev/null
# stop the api gracefully
/etc/rc.d/rc.unraid-api stop &>/dev/null
# Stop newer clients
unraid-api stop
# forcibly stop older clients
kill -9 `pidof unraid-api` &>/dev/null
# Find all PIDs referencing main.js and kill them, excluding grep process
pids=$(ps aux | grep "node /usr/local/unraid-api/dist/main.js" | grep -v grep | awk '{print $2}')
for pid in $pids; do
kill -9 $pid
done
# uninstall the api
rm -rf /usr/local/unraid-api
rm -rf /var/run/unraid-api.sock
rm -rf /usr/.pnpm-store
# Define files to backup in a shell variable
FILES_TO_BACKUP=(
"/usr/local/emhttp/plugins/dynamix/DisplaySettings.page"
"/usr/local/emhttp/plugins/dynamix/Registration.page"
"/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"
"/usr/local/emhttp/plugins/dynamix/include/ProvisionCert.php"
"/usr/local/emhttp/plugins/dynamix/include/UpdateDNS.php"
"/usr/local/emhttp/plugins/dynamix/include/ReplaceKey.php"
"/usr/local/emhttp/plugins/dynamix/include/Wrappers.php"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/Downgrade.page"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/Update.page"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/include/ShowChanges.php"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/scripts/showchanges"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/scripts/unraidcheck"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/include/UnraidCheck.php"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/include/UnraidCheckExec.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/Connect.page"
"/usr/local/emhttp/plugins/dynamix.my.servers/MyServers.page"
"/usr/local/emhttp/plugins/dynamix.my.servers/Registration.page"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/myservers1.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/myservers2.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/state.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/data/server-state.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/reboot-details.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/translations.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/web-components-extractor.php"
"/usr/local/emhttp/update.htm"
"/usr/local/emhttp/logging.htm"
"/etc/nginx/nginx.conf"
"/etc/rc.d/rc.nginx"
"/usr/share/mozilla/firefox/9n35r0i1.default/user.js"
)
# Backup each file if it exists and doesn't already have a backup
for FILE in "${FILES_TO_BACKUP[@]}"; do
if [ -f "$FILE" ] && [ ! -f "$FILE-" ]; then
cp -p "$FILE" "$FILE-"
echo "Backed up: $FILE"
fi
done
# Handle the unraid-components directory
DIR=/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components
if [ -d "$DIR" ] && [ ! -d "$DIR-" ]; then
cp -rp "$DIR" "$DIR-"
echo "Backed up directory: $DIR"
fi
]]>
echo "Backup complete."
exit 0
]]>
</INLINE>
</FILE>
@@ -253,591 +149,116 @@ fi
MAINNAME="&name;"
<![CDATA[
echo "Removing Plugin"
[[ -f "/var/log/packages/${MAINNAME}" ]] && removepkg --terse "${MAINNAME}"
# Find any installed dynamix.unraid.net package
pkg_installed=$(ls -1 /var/log/packages/dynamix.unraid.net* 2>/dev/null | head -1)
if [ -n "$pkg_installed" ]; then
pkg_basename=$(basename "$pkg_installed")
echo "Removing package: $pkg_basename"
removepkg --terse "$pkg_basename"
else
echo "No dynamix.unraid.net package found. Trying with basic package name."
removepkg --terse "${MAINNAME}"
fi
# File restoration function
echo "Restoring files..."
# Define files to restore in a shell variable - must match backup list
FILES_TO_RESTORE=(
"/usr/local/emhttp/plugins/dynamix/DisplaySettings.page"
"/usr/local/emhttp/plugins/dynamix/Registration.page"
"/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"
"/usr/local/emhttp/plugins/dynamix/include/ProvisionCert.php"
"/usr/local/emhttp/plugins/dynamix/include/UpdateDNS.php"
"/usr/local/emhttp/plugins/dynamix/include/ReplaceKey.php"
"/usr/local/emhttp/plugins/dynamix/include/Wrappers.php"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/Downgrade.page"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/Update.page"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/include/ShowChanges.php"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/scripts/showchanges"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/scripts/unraidcheck"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/include/UnraidCheck.php"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/include/UnraidCheckExec.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/Connect.page"
"/usr/local/emhttp/plugins/dynamix.my.servers/MyServers.page"
"/usr/local/emhttp/plugins/dynamix.my.servers/Registration.page"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/myservers1.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/myservers2.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/state.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/data/server-state.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/reboot-details.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/translations.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/web-components-extractor.php"
"/usr/local/emhttp/update.htm"
"/usr/local/emhttp/logging.htm"
"/etc/nginx/nginx.conf"
"/etc/rc.d/rc.nginx"
"/usr/share/mozilla/firefox/9n35r0i1.default/user.js"
)
# Restore each file if backup exists
for FILE in "${FILES_TO_RESTORE[@]}"; do
[ -f "$FILE-" ] && mv -f "$FILE-" "$FILE"
done
# Handle the unraid-components directory
DIR=/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components
# In certain instances the directory is not present and in others it is; ensure we delete it before we restore it
if [ -d "$DIR" ]; then
rm -rf "$DIR"
fi
if [ -d "$DIR-" ]; then
mv -f "$DIR-" "$DIR"
fi
]]>
</INLINE>
</FILE>
<!-- uninstall existing plugin during removal -->
<FILE Run="/bin/bash" Method="install remove">
<INLINE>
<![CDATA[
echo "Restoring Files"
if [ -f /tmp/restore-files-dynamix-unraid-net ]; then
# restore stock files
FILES_TO_RESTORE=(
"/usr/local/emhttp/plugins/dynamix/DisplaySettings.page"
"/usr/local/emhttp/plugins/dynamix/Registration.page"
"/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php"
"/usr/local/emhttp/plugins/dynamix/include/ProvisionCert.php"
"/usr/local/emhttp/plugins/dynamix/include/UpdateDNS.php"
"/usr/local/emhttp/plugins/dynamix/include/ReplaceKey.php"
"/usr/local/emhttp/plugins/dynamix/include/Wrappers.php"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/Downgrade.page"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/Update.page"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/include/ShowChanges.php"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/scripts/showchanges"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/scripts/unraidcheck"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/include/UnraidCheck.php"
"/usr/local/emhttp/plugins/dynamix.plugin.manager/include/UnraidCheckExec.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/Connect.page"
"/usr/local/emhttp/plugins/dynamix.my.servers/MyServers.page"
"/usr/local/emhttp/plugins/dynamix.my.servers/Registration.page"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/myservers1.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/myservers2.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/state.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/data/server-state.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/reboot-details.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/translations.php"
"/usr/local/emhttp/plugins/dynamix.my.servers/include/web-components-extractor.php"
)
for FILE in "${FILES_TO_RESTORE[@]}"; do
[[ -f "$FILE-" ]] && mv -f "$FILE-" "$FILE"
done
DIR=/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components
# In certain instances the directory is not present and in others it is; ensure we delete it before we restore it
if [[ -d "$DIR" ]]; then
rm -rf "$DIR"
fi
if [[ -d "$DIR-" ]]; then
mv -f "$DIR-" "$DIR"
fi
# delete plugin files from flash drive and OS
rm -f /boot/config/plugins/dynamix.my.servers/.gitignore
rm -f /etc/rc.d/rc.unraid-api
rm -f /etc/rc.d/rc.flash_backup
rm -rf /usr/local/sbin/unraid-api
rm -rf /usr/local/bin/unraid-api
rm -rf /usr/local/emhttp/plugins/dynamix.unraid.net
rm -rf /usr/local/emhttp/plugins/dynamix.unraid.net.staging
rm -f /etc/rc.d/rc6.d/K10_flash_backup
rm -f /var/log/gitcount
rm -f /var/log/gitflash
rm -f /var/log/gitratelimit
rm -f /usr/local/emhttp/state/flashbackup.ini
rm -f /usr/local/emhttp/state/myservers.cfg
# delete any legacy files that may exist
rm -rf /boot/config/plugins/dynamix.my.servers/libvirt.node
rm -rf /boot/config/plugins/dynamix.my.servers/segfault-handler.node
rm -rf /boot/config/plugins/dynamix.my.servers/wc
rm -f /boot/config/plugins/Unraid.net/unraid-api.tgz
rm -f /boot/config/plugins/Unraid.net/.gitignore
rm -f /boot/config/plugins/dynamix.my.servers/unraid-api.tgz
rm -rf /boot/config/plugins/Unraid.net/webComps
rm -rf /boot/config/plugins/Unraid.net/wc
rm -f /usr/local/emhttp/webGui/javascript/vue.js
rm -f /usr/local/emhttp/webGui/javascript/vue.min.js
rm -rf /usr/local/emhttp/webGui/webComps
rm -rf /usr/local/emhttp/webGui/wc
# clean up our optional makestate modifications in rc.nginx (on 6.9 and 6.10.0-rc[12])
sed -i '/scripts\/makestate/d' /etc/rc.d/rc.nginx
# clean up extra origin for robots.txt
sed -i '#robots.txt any origin/d' /etc/rc.d/rc.nginx
rm /tmp/restore-files-dynamix-unraid-net
fi
exit 0
]]>
</INLINE>
</FILE>
<!-- install all the things -->
<FILE Run="/bin/bash" Method="install">
<INLINE>
TAG="&TAG;" MAINTXZ="&source;.txz"
VENDOR_ARCHIVE="/boot/config/plugins/dynamix.my.servers/&VENDOR_STORE_FILENAME;"
PNPM_BINARY_FILE="&PNPM_BINARY;"
NODE_DIR="&NODE_DIR;"
TAG="&tag;"
PKG_FILE="&source;" # Full path to the package file including .txz extension
PKG_URL="&txz_url;" # URL where package was downloaded from
PKG_NAME="&txz_name;" # Name of the package file
VENDOR_ARCHIVE="/boot/config/plugins/dynamix.my.servers/&vendor_store_filename;"
<![CDATA[
appendTextIfMissing() {
FILE="$1" TEXT="$2"
if test -f "${FILE}" && ! grep -q "${TEXT}" "${FILE}" &>/dev/null; then
echo "${TEXT}">>"${FILE}"
fi
}
source /root/.bashrc
version=
# shellcheck disable=SC1091
source /etc/unraid-version
# exit this install block on isUnsupportedVersion
# must be 6.12.0 or higher (not 6.12.0-[beta|rc]x)
if [[ "${version:0:3}" == "6.9" || "${version:0:4}" == "6.10" || "${version:0:4}" == "6.11" || "${version:0:7}" == "6.12.0-" ]]; then
echo
echo "⚠️ Please uninstall this plugin or upgrade to a newer version of Unraid to enjoy Unraid Connect"
echo
echo "✅ It is safe to close this window"
echo
DIR="/usr/local/emhttp/plugins/dynamix.unraid.net" && [[ ! -d "$DIR" ]] && mkdir "$DIR"
cat << EOF > "$DIR/README.md"
**Unraid Connect**
Please uninstall this plugin or upgrade to a newer version of Unraid to enjoy Unraid Connect
EOF
# exit 0 or else the original plugin will be reinstalled at boot
exit 0
fi
# NOTE: any 'exit 1' after this point will result in a broken install
# Loop through the array of preserveFilesDirs and perform actions
# string param format
# "{move|copy|move_dir}:{path}:{preventDowngrade|skip}"
# move: move the file to a backup file
# copy: copy the file to a backup file
# move_dir: move the directory to a backup directory
# preventDowngrade: during plg install, if the file exists, do not overwrite it if the plg manifest version is less than the installed webgui version
# skip: do not perform any action if there is a manifest version difference
preserveFilesDirs=(
"move:/usr/local/emhttp/plugins/dynamix/Registration.page:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix/include/UpdateDNS.php:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix/include/ReplaceKey.php:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.plugin.manager/Downgrade.page:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.plugin.manager/Update.page:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.plugin.manager/scripts/unraidcheck:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.plugin.manager/include/UnraidCheck.php:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.plugin.manager/include/UnraidCheckExec.php:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/MyServers.page:skip"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/Connect.page:skip"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/Registration.page:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/include/myservers1.php:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/include/myservers2.php:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/include/state.php:preventDowngrade"
"move_dir:/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components:move_dir:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/data/server-state.php:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/include/reboot-details.php:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/include/translations.php:preventDowngrade"
"move:/usr/local/emhttp/plugins/dynamix.my.servers/include/web-components-extractor.php:preventDowngrade"
"copy:/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php:preventDowngrade"
)
preserveAction() {
local action="$1"
local path="$2"
local preventType="$3" # preventDowngrade or skip
if [[ "$action" == "move" ]]; then
[[ -f "$path" ]] && mv -f "$path" "$path-"
elif [[ "$action" == "copy" ]]; then
[[ -f "$path" ]] && cp -f "$path" "$path-"
elif [[ "$action" == "move_dir" ]]; then
[[ -d "$path" ]] && mv -f "$path" "$path-"
fi
}
# Loop through the array of preserveFilesDirs and perform actions
for obj in "${preserveFilesDirs[@]}"
do
IFS=':' read -r action path preventType <<< "$obj"
preserveAction "$action" "$path" "$preventType"
done
# patch DefaultPageLayout.php
# search text: <?=_('Version')?>: <?=_var($var,'version','?')?><?=$notes?>
# <?=_('Version')?>: <?=$var['version']?><?=$notes?>
# replacement text: <unraid-i18n-host><unraid-header-os-version></unraid-header-os-version></unraid-i18n-host>
FILE=/usr/local/emhttp/plugins/dynamix/include/DefaultPageLayout.php
# get line number matching the search text
# shellcheck disable=SC2016
LINENUM=$(grep -n '<?=_('"'Version'"')?>: <?=_var($var,'"'version'"','"'?'"')?><?=$notes?>' "$FILE" | cut -d : -f 1)
# shellcheck disable=SC2016
[[ -z $LINENUM ]] && LINENUM=$(grep -n '<?=_('"'Version'"')?>: <?=$var\['"'version'"']?><?=$notes?>' "$FILE" | cut -d : -f 1)
if [[ -n $LINENUM ]]; then
# backup the file so it can be restored later
cp -f "$FILE" "$FILE-"
# sed should work, but it is very difficult to escape the search text
# instead, make a new file containing everything before LINENUM, then the replacement text, then everything after LINENUM
head -$((LINENUM-1)) "$FILE"> "$FILE~"
echo '<unraid-i18n-host><unraid-header-os-version></unraid-header-os-version></unraid-i18n-host>' >> "$FILE~"
tail +$((LINENUM+1)) "$FILE">> "$FILE~"
# disable these lines: <?$readme = @file_get_contents("$docroot/plugins/unRAIDServer/README.md",false,null,0,20)?:''?>
# <?$readme = @file_get_contents("$docroot/plugins/unRAIDServer/README.md",false,null,0,20)??'';?>
# <?$readme = @file_get_contents("$docroot/plugins/unRAIDServer/README.md",false,null,0,20);?>
# by replacing with: <?$readme ="removed by Connect";?>
sed -i '/unRAIDServer\/README\.md/c\<?$readme ="removed by Connect";?>' "$FILE~"
mv -f "$FILE~" "$FILE"
fi
# patch: showchanges, starting with 6.11.0-rc1
# ShowChanges.php, in 6.10
# search text: $valid = ['/var/tmp/','/tmp/plugins/'];
# replacement text: $valid = ['/var/tmp/','/tmp/plugins/','/boot/previous'];
FILES=(/usr/local/emhttp/plugins/dynamix.plugin.manager/scripts/showchanges /usr/local/emhttp/plugins/dynamix.plugin.manager/include/ShowChanges.php)
for FILE in "${FILES[@]}"; do
if test -f "${FILE}" && ! grep -q "'/boot/previous'" "${FILE}" &>/dev/null; then
# backup the file so it can be restored later
cp -f "$FILE" "$FILE-"
sed -i '/$valid = \[/c$valid = ['"'/var/tmp/'"','"'/tmp/plugins/'"','"'/boot/previous'"'];' "$FILE"
# Install the Slackware package
echo "Installing package..."
# Clean up any old package txz files if they don't match our current version
for txz_file in /boot/config/plugins/dynamix.my.servers/dynamix.unraid.net-*.txz; do
if [ -f "$txz_file" ] && [ "$txz_file" != "${PKG_FILE}" ]; then
echo "Removing old package file: $txz_file"
rm -f "$txz_file"
fi
done
# remove keys.lime-technology.com from hosts file
# brings older versions of Unraid in sync with 6.12.12
# no need to restore original file on uninstall
if grep -q "keys.lime-technology.com" /etc/hosts &>/dev/null; then sed -i "/keys.lime-technology.com/d" /etc/hosts &>/dev/null; fi
# patch ProvisionCert.php
# search text: curl_init("https://keys.lime-technology.com/account/ssl/provisionwildcard")
# curl_init("https://keys.lime-technology.com/account/ssl/$endpoint");
# prepend text: see $ADDTEXT4
ADDTEXT4=$(
cat <<'END_HEREDOC'
// added by Unraid Connect
// ensure keys.lime-technology.com is not hard-coded in the hosts file
exec('if grep -q "keys.lime-technology.com" /etc/hosts &>/dev/null; then sed -i "/keys.lime-technology.com/d" /etc/hosts &>/dev/null; fi');
END_HEREDOC
)
FILE=/usr/local/emhttp/plugins/dynamix/include/ProvisionCert.php
# get line number matching the search text
# shellcheck disable=SC2016
LINENUM=$(grep -n 'curl_init("https://keys.lime-technology.com/account/ssl/provisionwildcard")' "$FILE" | cut -d : -f 1)
[[ -z $LINENUM ]] && LINENUM=$(grep -n 'curl_init("https://keys.lime-technology.com/account/ssl/$endpoint")' "$FILE" | cut -d : -f 1)
if [[ -n $LINENUM ]]; then
# backup the file so it can be restored later
cp -f "$FILE" "$FILE-"
# sed should work, but it is very difficult to escape
# instead, make a new file containing everything before LINENUM, then the new text, then everything including and after LINENUM
head -$((LINENUM-1)) "$FILE"> "$FILE~"
echo "$ADDTEXT4">> "$FILE~"
echo "">> "$FILE~"
tail +$LINENUM "$FILE">> "$FILE~"
mv -f "$FILE~" "$FILE"
# Install the package using the explicit file path
upgradepkg --install-new --reinstall "${PKG_FILE}"
if [ $? -ne 0 ]; then
echo "⚠️ Package installation failed"
exit 1
fi
# move settings on flash drive
CFG_OLD=/boot/config/plugins/Unraid.net
CFG_NEW=/boot/config/plugins/dynamix.my.servers
[[ -d "$CFG_OLD" ]] && [[ ! -d "$CFG_NEW" ]] && mv "$CFG_OLD" "$CFG_NEW"
# relax restrictions on built-in Firefox so it can sign in to Unraid Connect
# brings older versions of Unraid in sync with 6.12.0
# no need to restore original file on uninstall
# note: if the file is modified while Firefox is open it will have no effect; reboot so the file is changed before Firefox is loaded
# note: prior to 6.12, file will only exist if system is booted in GUI Mode
FILE=/usr/share/mozilla/firefox/9n35r0i1.default/user.js
if [[ -f "$FILE" ]]; then
cp -f "$FILE" "$FILE-"
appendTextIfMissing "${FILE}" 'user_pref("privacy.firstparty.isolate", false);'
appendTextIfMissing "${FILE}" 'user_pref("javascript.options.asmjs", true);'
appendTextIfMissing "${FILE}" 'user_pref("javascript.options.wasm", true);'
fi
# fix update.htm to work in an iframe
# brings older versions of Unraid in sync with 6.12.0
# no need to restore original file on uninstall, will cause issues if uninstall from within an iframe
FILE=/usr/local/emhttp/update.htm
if test -f "${FILE}" && grep -q "top.document" "${FILE}" &>/dev/null; then
cp -f "$FILE" "$FILE-"
sed -i 's/top.document/parent.document/gm' "${FILE}"
fi
# fix logging.htm (openBox) to work in an iframe
# brings older versions of Unraid in sync with 6.12.0
# no need to restore original file on uninstall
FILE=/usr/local/emhttp/logging.htm
if test -f "${FILE}" && grep -q "top.Shadowbox" "${FILE}" &>/dev/null; then
cp -f "$FILE" "$FILE-"
sed -i 's/top.Shadowbox/parent.Shadowbox/gm' "${FILE}"
fi
# ensure _var() is defined, brings older versions of Unraid in sync with 6.12.0
FILE=/usr/local/emhttp/plugins/dynamix/include/Wrappers.php
if test -f "${FILE}" && ! grep -q "function _var" "${FILE}" &>/dev/null; then
ADDTEXT1=$(
cat <<'END_HEREDOC'
// backported by Unraid Connect
function _var(&$name, $key=null, $default='') {
return is_null($key) ? ($name ?? $default) : ($name[$key] ?? $default);
}
END_HEREDOC
)
fi
# ensure my_logger() is defined, brings older versions of Unraid in sync with 6.13.0
# NOTE: each quoted heredoc below is PHP source captured into a shell variable
# and appended verbatim to ${FILE} later in this script — treat it as data,
# not shell; do not reformat or translate its contents.
if test -f "${FILE}" && ! grep -q "function my_logger" "${FILE}" &>/dev/null; then
ADDTEXT2=$(
cat <<'END_HEREDOC'
// backported by Unraid Connect
// ensure params passed to logger are properly escaped
function my_logger($message, $logger='webgui') {
exec('logger -t '.escapeshellarg($logger).' -- '.escapeshellarg($message));
}
END_HEREDOC
)
fi
# ensure http_get_contents() is defined, brings older versions of Unraid in sync with 6.13.0
# (only captured when the target file exists and the function is absent, so
# re-running the installer never duplicates the definition)
if test -f "${FILE}" && ! grep -q "function http_get_contents" "${FILE}" &>/dev/null; then
ADDTEXT3=$(
cat <<'END_HEREDOC'
// backported by Unraid Connect
// Original PHP code by Chirp Internet: www.chirpinternet.eu
// Please acknowledge use of this code by including this header.
// https://www.the-art-of-web.com/php/http-get-contents/
// Modified for Unraid
/**
 * Fetches URL and returns content
 * @param string $url The URL to fetch
 * @param array $opts Array of options to pass to curl_setopt()
 * @param array $getinfo Empty array passed by reference, will contain results of curl_getinfo and curl_error
 * @return string|false $out The fetched content
 */
function http_get_contents(string $url, array $opts = [], array &$getinfo = NULL) {
$ch = curl_init();
if(isset($getinfo)) {
curl_setopt($ch, CURLINFO_HEADER_OUT, TRUE);
}
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_FRESH_CONNECT, true);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 15);
curl_setopt($ch, CURLOPT_TIMEOUT, 45);
curl_setopt($ch, CURLOPT_ENCODING, "");
curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);
curl_setopt($ch, CURLOPT_REFERER, "");
curl_setopt($ch, CURLOPT_FAILONERROR, true);
curl_setopt($ch, CURLOPT_USERAGENT, 'Unraid');
if(is_array($opts) && $opts) {
foreach($opts as $key => $val) {
curl_setopt($ch, $key, $val);
}
}
$out = curl_exec($ch);
if (curl_errno($ch) == 23) {
// error 23 detected, try CURLOPT_ENCODING = "deflate"
curl_setopt($ch, CURLOPT_ENCODING, "deflate");
$out = curl_exec($ch);
}
if (isset($getinfo)) {
$getinfo = curl_getinfo($ch);
}
if ($errno = curl_errno($ch)) {
$msg = "Curl error $errno: " . (curl_error($ch) ?: curl_strerror($errno)) . ". Requested url: '$url'";
if(isset($getinfo)) {
$getinfo['error'] = $msg;
}
my_logger($msg, "http_get_contents");
}
curl_close($ch);
return $out;
}
END_HEREDOC
)
fi
# Append any pending PHP backports (ADDTEXT1..3, captured above) to ${FILE}.
# A one-time backup is kept at ${FILE}- so the uninstall script can restore
# the original file later.
if [[ -n "${ADDTEXT1}" || -n "${ADDTEXT2}" || -n "${ADDTEXT3}" ]]; then
TMP="$FILE.$RANDOM"
cp -f "$FILE" "$TMP"
cp -f "$FILE" "$FILE-"
# delete last line of the file if it contains `?>`
# fixed: quote the command substitution — the unquoted `test $(tail ...)`
# emitted a test syntax error when the last line was empty or contained
# whitespace, instead of simply evaluating false
if [[ "$(tail -n 1 "${TMP}")" == '?>' ]]; then
sed -i '$ d' "${TMP}"
fi
[[ -n "${ADDTEXT1}" ]] && echo "${ADDTEXT1}">>"${TMP}"
[[ -n "${ADDTEXT2}" ]] && echo "${ADDTEXT2}">>"${TMP}"
[[ -n "${ADDTEXT3}" ]] && echo "${ADDTEXT3}">>"${TMP}"
# re-add the closing PHP tag that was stripped above
echo "?>">>"${TMP}"
mv "${TMP}" "${FILE}"
fi
# Restore the upgradepkg binary if it exists and the original does not
if [[ -f /sbin/upgradepkg- && ! -f /sbin/upgradepkg ]]; then
cp -f /sbin/upgradepkg- /sbin/upgradepkg
fi
# install the main txz
upgradepkg --install-new --reinstall "${MAINTXZ}"
# confirm an expected file now exists
# WARNING: failure here results in broken install
[[ ! -f /usr/local/emhttp/plugins/dynamix.my.servers/scripts/gitflash_log ]] && echo "⚠️ files missing from main txz" && exit 1
# Stamp the plugin version into the README shown in the webgui.
# fixed: `[[ -n "$TAG" ]]` already implies "$TAG" != "" — dropped the
# redundant second test
if [[ -n "$TAG" ]]; then
printf -v sedcmd 's@^\*\*Unraid Connect\*\*@**Unraid Connect (%s)**@' "$TAG"
sed -i "${sedcmd}" "/usr/local/emhttp/plugins/dynamix.unraid.net/README.md"
fi
# setup env
echo "env=\"production\"">/boot/config/plugins/dynamix.my.servers/env
# Use myservers.cfg values to help prevent conflicts when installing
CFG=/boot/config/plugins/dynamix.my.servers/myservers.cfg
# shellcheck disable=SC1090
source <(grep 'email\|apikey="unraid_' "${CFG}" 2>/dev/null)
CFG_CLEANED=0
# If user is signed in but has no apikey, or if the apikey is not 64 chars, wipe the [remote] section from the cfg
# (no subshell parens needed around the [[ ]] test)
if [[ -n "${email}" && (-z "${apikey}" || "${#apikey}" -ne "64") ]]; then
# remove only the [remote] section
awk '{
if($0 ~ /\[remote\]/){output="off"; next}
if($0 ~ /\[/){output="on"; print; next}
if(output == "on"){print}
}' "${CFG}">"${CFG}-new" && mv "${CFG}-new" "${CFG}" && CFG_CLEANED=1
# fixed: `CFG_CLEANED=1` was previously passed as a third operand to mv
# (which then failed with "target is not a directory" and the cleaned cfg
# was never moved into place); it must be chained with && instead
echo "⚠️ Automatically signed out of Unraid.net"
fi
# configure flash backup to stop when the system starts shutting down
[[ ! -d /etc/rc.d/rc6.d ]] && mkdir /etc/rc.d/rc6.d
[[ ! -h /etc/rc.d/rc0.d ]] && ln -s /etc/rc.d/rc6.d /etc/rc.d/rc0.d
[[ ! -h /etc/rc.d/rc6.d/K10_flash_backup ]] && ln -s /etc/rc.d/rc.flash_backup /etc/rc.d/rc6.d/K10_flash_backup
# allow webgui to be iframed only on Connect dashboard, only applies to 6.10+
# if needed, restart nginx before installing the unraid-api
CHANGED=no
FILE=/etc/nginx/nginx.conf
# brings older versions of Unraid in sync with 6.12.0
if grep -q "SAMEORIGIN" "${FILE}"; then
CHANGED=yes
# fixed: OLD=/NEW= were previously appended to the cp command line as extra
# file operands (cp failed, and the variables were never assigned, so the
# sed below substituted nothing); backup and assignments are now separate
cp "$FILE" "$FILE-"
OLD="add_header X-Frame-Options 'SAMEORIGIN';"
NEW="add_header Content-Security-Policy \"frame-ancestors 'self' https://connect.myunraid.net/\";"
sed -i "s#${OLD}#${NEW}#" "${FILE}"
fi
FILE=/etc/rc.d/rc.nginx
# brings older versions of Unraid in sync with 6.12.0
if ! grep -q "#robots.txt any origin" "${FILE}"; then
CHANGED=yes
# fixed: same extra-operand problem as above — FIND= must be its own statement
cp "$FILE" "$FILE-"
FIND="location = \/robots.txt {"
# escape tabs and spaces
ADD="\ \ \ \ \ add_header Access-Control-Allow-Origin *; #robots.txt any origin"
sed -i "/${FIND}/a ${ADD}" "${FILE}"
fi
# Restore one path from its "-"-suffixed backup unless told to skip.
#   $1 action: move|copy (single file) or move_dir (whole directory)
#   $2 path:   destination to restore
#   $3 type:   "skip" leaves the currently-installed files in place
# The backup itself is deliberately left untouched so the plugin's uninstall
# script can still restore the original file later.
preventDowngradeAction() {
    local act="$1" dest="$2" mode="$3"
    # "skip" means: do nothing for this entry
    [[ "$mode" == "skip" ]] && return
    case "$act" in
    move|copy)
        # put the backed-up file back in place (backup is kept)
        [[ -f "$dest-" ]] && cp -f "$dest-" "$dest"
        ;;
    move_dir)
        # replace the directory wholesale with the backup's contents
        # (the glob is required: we copy the contents, not the dir itself)
        [[ -d "$dest-" ]] && rm -rf "$dest" && mkdir "$dest" && cp -rf "$dest-/"* "$dest"
        ;;
    *)
        # unknown action: preserve the original's non-zero fallthrough status
        false
        ;;
    esac
}
# Extract "ts" values from both files
# NOTE(review): preserveFilesDirs is expected to be an array of
# "action:path:preventType" entries defined earlier in this script — confirm
# it is set before this point.
plgWebComponentPath="/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components"
backupWebComponentPath="/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components-"
# find may produce an empty string when no manifest.json exists; bash's
# arithmetic [[ -gt ]] below treats an empty operand as 0, so a missing
# manifest never wins the comparison
plgManifestTs=$(find "$plgWebComponentPath" -name manifest.json -exec jq -r '.ts' {} \; 2>/dev/null)
webguiManifestTs=$(find "$backupWebComponentPath" -name manifest.json -exec jq -r '.ts' {} \; 2>/dev/null)
# Compare the "ts" values and return the file path of the higher value
# If the backed-up (stock webgui) components are newer than the plugin's,
# restore them so installing the plugin cannot downgrade the web components.
if [[ "$webguiManifestTs" -gt "$plgManifestTs" ]]; then
# Loop through the array of preserveFilesDirs and perform actions
for obj in "${preserveFilesDirs[@]}"
do
IFS=':' read -r action path preventType <<< "$obj"
preventDowngradeAction "$action" "$path" "$preventType"
done
echo "♻️ Reverted to stock web component files"
fi
# Install and start the unraid-api, then reload nginx if we patched it above.
flash="/boot/config/plugins/dynamix.my.servers"
[[ ! -d "${flash}" ]] && echo "Please reinstall the Unraid Connect plugin" && exit 1
[[ ! -f "${flash}/env" ]] && echo 'env=production' >"${flash}/env"
# default, may be overridden by the env file sourced next
env=production
# shellcheck disable=SC1091
source "${flash}/env"
# Install the API to /usr/local/unraid-api
api_base_directory="/usr/local/unraid-api"
unraid_binary_path="/usr/local/bin/unraid-api"
# Stop old process
# (older installs shipped the binary inside a directory at this path)
if [[ -f "/usr/local/bin/unraid-api/unraid-api" ]]; then
/usr/local/bin/unraid-api/unraid-api stop
rm -rf /usr/local/bin/unraid-api
elif [[ -f "${unraid_binary_path}" ]]; then
${unraid_binary_path} stop
fi
# Kill any remaining unraid-api processes
pkill -9 unraid-api
# Create log directory (PM2 will not start without it)
mkdir -p /var/log/unraid-api
# Copy env file
cp "${api_base_directory}/.env.${env}" "${api_base_directory}/.env"
# bail if expected file does not exist
[[ ! -f "${api_base_directory}/package.json" ]] && echo "unraid-api install failed" && exit 1
# Create Symlink from /usr/local/unraid-api/dist/cli.js to /usr/local/bin/unraid-api
# Ensure we're linking the file, not the directory, by checking it exists first
if [[ -f "${api_base_directory}/dist/cli.js" ]]; then
ln -sf "${api_base_directory}/dist/cli.js" "${unraid_binary_path}"
else
echo "Error: ${api_base_directory}/dist/cli.js does not exist" && exit 1
fi
# Ensure unraid-api exists
if [[ ! -f "${unraid_binary_path}" ]]; then
echo "Error: unraid-api binary not found at ${unraid_binary_path}" && exit 1
fi
# Create symlink to unraid-api binary (to allow usage elsewhere)
# NOTE(review): NODE_DIR and PNPM_BINARY_FILE are assumed to be set earlier in
# this script (not visible here) — confirm they are always defined/non-empty,
# since they are expanded unquoted below.
ln -sf ${NODE_DIR}/bin/node /usr/local/bin/node
ln -sf ${NODE_DIR}/bin/npm /usr/local/bin/npm
ln -sf ${NODE_DIR}/bin/corepack /usr/local/bin/corepack
ln -sf ${unraid_binary_path} /usr/local/sbin/unraid-api
ln -sf ${unraid_binary_path} /usr/bin/unraid-api
cp -f "${PNPM_BINARY_FILE}" /usr/local/bin/pnpm
chmod +x /usr/local/bin/pnpm
# unpack node_modules from the bundled vendor archive (VENDOR_ARCHIVE is
# assumed to be set earlier in this script)
/etc/rc.d/rc.unraid-api restore-dependencies "$VENDOR_ARCHIVE"
echo "Starting flash backup (if enabled)"
logger "Starting flash backup (if enabled)"
# start flash backup detached via at(1) so it cannot block this installer
echo "/etc/rc.d/rc.flash_backup start" | at -M now &>/dev/null
. /root/.bashrc
logger "Starting Unraid API"
${unraid_binary_path} start
if [[ "${CHANGED}" == "yes" ]]; then
if /etc/rc.d/rc.nginx status &>/dev/null; then
# if nginx is running, reload it to enable the changes above
# note: if this is being installed at boot, nginx will not yet be running
echo ""
echo "✅ Installation complete, now reloading web server - it is safe to close this window"
/etc/rc.d/rc.nginx reload &>/dev/null
fi
else
echo
echo "✅ Installation is complete, it is safe to close this window"
echo
fi
# NOTE(review): when CHANGED != yes the completion message above is printed
# twice (once in the else branch, once here) — confirm whether the
# unconditional block below is intentional.
echo
echo "✅ Installation is complete, it is safe to close this window"
echo
exit 0
]]>
</INLINE>
</FILE>
<!-- uninstall cleanup message -->
<FILE Run="/bin/bash" Method="remove">
<INLINE>
<![CDATA[
version=
# shellcheck disable=SC1091
source /etc/unraid-version
echo
echo "✅ Uninstall is complete, it is safe to close this window"
echo

View File

@@ -17,6 +17,10 @@ fi
CI=${CI:-false}
TAG="LOCAL_PLUGIN_BUILD"
IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
GIT_SHA=$(git rev-parse --short HEAD)
API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
# Define container name for easier management
CONTAINER_NAME="plugin-builder"
@@ -28,4 +32,4 @@ docker ps -q --filter "name=${CONTAINER_NAME}" | xargs -r docker stop
# Start the container with the specified environment variables
echo "Starting plugin-builder container..."
docker compose run --remove-orphans --service-ports -e HOST_LAN_IP="$HOST_LAN_IP" -e CI="$CI" -e TAG="$TAG" ${CONTAINER_NAME} "$@"
docker compose run --remove-orphans --service-ports -e HOST_LAN_IP="$HOST_LAN_IP" -e CI="$CI" -e TAG="$TAG" -e API_VERSION="$API_VERSION" ${CONTAINER_NAME} "$@"

View File

@@ -8,104 +8,135 @@ flash="/boot/config/plugins/dynamix.my.servers"
[[ ! -d "${flash}" ]] && echo "Please reinstall the Unraid Connect plugin" && exit 1
[[ ! -f "${flash}/env" ]] && echo 'env=production' >"${flash}/env"
unraid_binary_path="/usr/local/bin/unraid-api"
dependencies_dir="/usr/local/unraid-api/node_modules"
api_base_dir="/usr/local/unraid-api"
scripts_dir="/usr/local/share/dynamix.unraid.net/scripts"
# Ensure script permissions
if [ -d "$scripts_dir" ]; then
chmod +x "$scripts_dir"/*.sh 2>/dev/null
fi
# Placeholder functions for plugin installation/uninstallation
install() {
true
}
uninstall() {
true
}
# Restores the node_modules directory from a backup file
# Args:
# $1 - Path to the backup file (tar.xz format)
# Returns:
# 0 on success, 1 on failure
# Note: Requires 1.5x the backup size in free space for safe extraction
restore_dependencies() {
local backup_file="$1"
# Check if backup file exists
if [ ! -f "$backup_file" ]; then
echo "Backup file not found at '$backup_file'. Skipping restore."
return 0
# Service control functions
start() {
echo "Starting Unraid API service..."
# Ensure dependencies are installed
if [ -x "$scripts_dir/dependencies.sh" ]; then
"$scripts_dir/dependencies.sh" ensure || {
echo "Failed to install dependencies aborting start."
return 1
}
else
echo "Warning: dependencies.sh script not found or not executable"
fi
# Check available disk space in destination
local backup_size
backup_size=$(stat -c%s "$backup_file")
local dest_space
dest_space=$(df --output=avail "$(dirname "$dependencies_dir")" | tail -n1)
dest_space=$((dest_space * 1024)) # Convert KB to bytes
# Require 1.5x the backup size for safe extraction
local required_space=$((backup_size + (backup_size / 2)))
if [ "$dest_space" -lt "$required_space" ]; then
echo "Error: Insufficient disk space in destination. Need at least $((required_space / 1024 / 1024))MB, have $((dest_space / 1024 / 1024))MB"
# Create log directory if it doesn't exist
mkdir -p /var/log/unraid-api
# Copy env file if needed
if [ -f "${api_base_dir}/.env.production" ] && [ ! -f "${api_base_dir}/.env" ]; then
cp "${api_base_dir}/.env.production" "${api_base_dir}/.env"
fi
# Start the flash backup service if available
if [ -x "/etc/rc.d/rc.flash_backup" ]; then
echo "Starting flash backup service..."
/etc/rc.d/rc.flash_backup start
fi
# Start the API service
if [ -x "${unraid_binary_path}" ]; then
"${unraid_binary_path}" start
return $?
else
echo "Error: Unraid API binary not found or not executable at ${unraid_binary_path}"
return 1
fi
echo "Restoring node_modules from '$backup_file' to '$dependencies_dir'"
# Remove existing store directory if it exists and ensure its parent directory exists
rm -rf "$dependencies_dir"
mkdir -p "$(dirname "$dependencies_dir")"
# Extract directly to final location
if ! tar -xJf "$backup_file" -C "$(dirname "$dependencies_dir")" --preserve-permissions; then
echo "Error: Failed to extract backup to final location."
rm -rf "$dependencies_dir"
return 1
fi
echo "node_modules restored successfully."
}
# Archives the node_modules directory to a specified location
# Args: none
# Returns:
# 0 on success, 1 on failure
archive_dependencies() {
local source_dir="/usr/local/unraid-api/node_modules"
local dest_dir="/boot/config/plugins/dynamix.my.servers"
local archive_file="${dest_dir}/node_modules.tar.xz"
# Check if source directory exists
if [ ! -d "$source_dir" ]; then
echo "Error: Source node_modules directory '$source_dir' does not exist."
# Stop the Unraid API service by delegating to the CLI binary.
# Returns the binary's exit status, or 1 when the binary is missing.
stop() {
    echo "Stopping Unraid API service..."
    if [ ! -x "${unraid_binary_path}" ]; then
        echo "Error: Unraid API binary not found or not executable at ${unraid_binary_path}"
        return 1
    fi
    # the function's status is the stop command's status
    "${unraid_binary_path}" stop
}
# Create destination directory if it doesn't exist
mkdir -p "$dest_dir"
# Restart the service: stop, brief pause, then start.
restart() {
stop
# give the process time to fully exit and release resources before starting
sleep 2
start
}
echo "Archiving node_modules from '$source_dir' to '$archive_file'"
# Create archive with XZ compression level 5, preserving symlinks
if XZ_OPT=-5 tar -cJf "$archive_file" -C "$(dirname "$source_dir")" "$(basename "$source_dir")"; then
echo "node_modules archive created successfully."
# Report the Unraid API service status via the CLI binary.
# Returns the binary's exit status, or 1 when the binary is missing.
status() {
    if [ -x "${unraid_binary_path}" ]; then
        "${unraid_binary_path}" status
        return $?
    else
        # fixed: this branch previously also printed an unrelated
        # "Failed to create node_modules archive." message left over from
        # the removed archive_dependencies() function
        echo "Error: Unraid API binary not found or not executable at ${unraid_binary_path}"
        return 1
    fi
}
case "$1" in
'start')
start
;;
'stop')
stop
;;
'restart'|'reload')
restart
;;
'status')
status
;;
'install')
install "$2"
;;
'reload')
restart
;;
'uninstall')
uninstall
;;
'restore-dependencies')
restore_dependencies "$2"
'ensure')
if [ -x "$scripts_dir/dependencies.sh" ]; then
"$scripts_dir/dependencies.sh" ensure "$2"
else
echo "Error: dependencies.sh script not found or not executable"
exit 1
fi
;;
'archive-dependencies')
archive_dependencies
if [ -x "$scripts_dir/dependencies.sh" ]; then
"$scripts_dir/dependencies.sh" archive
else
echo "Error: dependencies.sh script not found or not executable"
exit 1
fi
;;
'redownload-vendor-archive')
if [ -x "$scripts_dir/dependencies.sh" ]; then
if [ -n "$2" ] && [ -n "$3" ]; then
"$scripts_dir/dependencies.sh" redownload "$2" "$3"
else
echo "Usage: $0 redownload-vendor-archive <archive_path> <version>"
exit 1
fi
else
echo "Error: dependencies.sh script not found or not executable"
exit 1
fi
;;
*)
# Pass all other commands to unraid-api

View File

@@ -0,0 +1,7 @@
#!/bin/sh
# Stop flash-backup on shutdown/reboot
# Only acts when the rc script is present and executable, so this hook is
# a safe no-op on systems without flash backup installed.
if [ -x /etc/rc.d/rc.flash_backup ]; then
echo "Stopping flash-backup..."
/etc/rc.d/rc.flash_backup stop
fi

View File

@@ -0,0 +1,7 @@
#!/bin/sh
# Stop unraid-api on shutdown/reboot
# Only acts when the rc script is present and executable, so this hook is
# a safe no-op on systems without the API installed.
if [ -x /etc/rc.d/rc.unraid-api ]; then
echo "Stopping unraid-api..."
/etc/rc.d/rc.unraid-api stop
fi

View File

@@ -0,0 +1,109 @@
#!/bin/sh
set -eu
# Main installation script for dynamix.unraid.net package
# This script calls specialized external scripts to handle different aspects of installation
# NOTE(review): with `set -e`, any helper invoked via command substitution
# below (e.g. patches_output=$(...)) that exits non-zero aborts this whole
# script — confirm the helpers always exit 0 on non-fatal warnings.
# Get the install mode (passed as the first argument by the installpkg script)
INSTALL_MODE="${1:-install}"
# Use absolute paths for script directory to avoid path resolution issues
SCRIPTS_DIR="/usr/local/share/dynamix.unraid.net/install/scripts"
# Log file for debugging
LOGFILE="/var/log/unraid-api/dynamix-unraid-install.log"
mkdir -p "$(dirname "$LOGFILE")"
# `date >` truncates any previous log and stamps the new run
date > "$LOGFILE"
echo "Starting installation with mode: $INSTALL_MODE" >> "$LOGFILE"
echo "Script directory: $SCRIPTS_DIR" >> "$LOGFILE"
# Make sure scripts are executable
if [ -d "$SCRIPTS_DIR" ]; then
chmod +x "$SCRIPTS_DIR"/*.sh
echo "Made scripts executable" >> "$LOGFILE"
else
echo "ERROR: Scripts directory not found: $SCRIPTS_DIR" >> "$LOGFILE"
# Create directory structure if it doesn't exist yet
mkdir -p "$SCRIPTS_DIR"
fi
# Process based on installation mode
if [ "$INSTALL_MODE" = "install" ] || [ "$INSTALL_MODE" = "upgrade" ]; then
echo "Starting Unraid Connect installation..."
# Apply file patches and system configurations
if [ -x "$SCRIPTS_DIR/file_patches.sh" ]; then
echo "Applying system patches and configurations..."
echo "Running file_patches.sh" >> "$LOGFILE"
# Capture output and add to log file
patches_output=$("$SCRIPTS_DIR/file_patches.sh")
echo "$patches_output" >> "$LOGFILE"
else
echo "ERROR: file_patches.sh not found or not executable" >> "$LOGFILE"
fi
# Setup the API (but don't start it yet)
if [ -x "$SCRIPTS_DIR/setup_api.sh" ]; then
echo "Setting up Unraid API..."
echo "Running setup_api.sh" >> "$LOGFILE"
# Capture output and add to log file
setup_output=$("$SCRIPTS_DIR/setup_api.sh")
echo "$setup_output" >> "$LOGFILE"
# Verify symlinks were created
if [ -L "/usr/local/bin/unraid-api" ]; then
echo "Symlink created successfully" >> "$LOGFILE"
else
echo "ERROR: Symlink not created, attempting to create manually" >> "$LOGFILE"
# Create the symlink manually as fallback
if [ -f "/usr/local/unraid-api/dist/cli.js" ]; then
ln -sf "/usr/local/unraid-api/dist/cli.js" "/usr/local/bin/unraid-api"
ln -sf "/usr/local/bin/unraid-api" "/usr/local/sbin/unraid-api"
ln -sf "/usr/local/bin/unraid-api" "/usr/bin/unraid-api"
echo "Manually created symlinks" >> "$LOGFILE"
else
echo "ERROR: Source file for symlink not found" >> "$LOGFILE"
fi
fi
else
echo "ERROR: setup_api.sh not found or not executable" >> "$LOGFILE"
fi
# Make the rc script executable
if [ -f /etc/rc.d/rc.unraid-api ]; then
chmod 755 /etc/rc.d/rc.unraid-api
echo "Made rc.unraid-api executable" >> "$LOGFILE"
# Start the service
/etc/rc.d/rc.unraid-api start
# Verify the service started successfully
# (grep for "online" in the status output; failure is non-fatal here)
if ! /etc/rc.d/rc.unraid-api status | grep -q "online"; then
echo "⚠️ Warning: Unraid API service failed to start" | tee -a "$LOGFILE"
echo "Check $LOGFILE for details"
else
echo "Unraid API service started successfully" >> "$LOGFILE"
fi
else
echo "ERROR: rc.unraid-api not found" >> "$LOGFILE"
fi
# Run post-installation verification
if [ -x "$SCRIPTS_DIR/verify_install.sh" ]; then
echo "Running post-installation verification..."
echo "Running verify_install.sh" >> "$LOGFILE"
# Capture output and add to log file
verify_output=$("$SCRIPTS_DIR/verify_install.sh")
echo "$verify_output" >> "$LOGFILE"
else
echo "ERROR: verify_install.sh not found or not executable" >> "$LOGFILE"
fi
echo "Installation completed successfully."
echo "Installation completed at $(date)" >> "$LOGFILE"
elif [ "$INSTALL_MODE" = "remove" ]; then
echo "Starting Unraid Connect removal..."
echo "Starting removal" >> "$LOGFILE"
echo "Removal completed successfully."
echo "Removal completed at $(date)" >> "$LOGFILE"
fi

View File

@@ -0,0 +1,18 @@
# HOW TO EDIT THIS FILE:
# The "handy ruler" below makes it easier to edit a package description.
# Line up the first '|' above the ':' following the base package name, and
# the '|' on the right side marks the last column you can put a character in.
# You must make exactly 11 lines for the formatting to be correct. It's also
# customary to leave one space after the ':' except on otherwise blank lines.
|-----handy-ruler------------------------------------------------------|
dynamix.unraid.net: dynamix.unraid.net (Unraid API)
dynamix.unraid.net:
dynamix.unraid.net: Provides the Unraid API and Connect services for Unraid OS.
dynamix.unraid.net: This package enables remote management, monitoring, and access
dynamix.unraid.net: to Unraid servers through the Unraid Connect platform.
dynamix.unraid.net:
dynamix.unraid.net: Features include system monitoring, notifications, remote access,
dynamix.unraid.net: and integration with the Unraid Connect dashboard.
dynamix.unraid.net:
dynamix.unraid.net: Homepage: https://unraid.net/

View File

@@ -146,26 +146,17 @@ class ServerState
private function setConnectValues()
{
if (file_exists('/var/lib/pkgtools/packages/dynamix.unraid.net')) {
if (file_exists('/usr/local/bin/unraid-api')) {
$this->connectPluginInstalled = 'dynamix.unraid.net.plg';
}
if (file_exists('/var/lib/pkgtools/packages/dynamix.unraid.net.staging')) {
$this->connectPluginInstalled = 'dynamix.unraid.net.staging.plg';
}
if ($this->connectPluginInstalled && !file_exists('/usr/bin/unraid-api')) {
$this->connectPluginInstalled .= '_installFailed';
}
// exit early if the plugin is not installed
if (!$this->connectPluginInstalled) {
return;
}
$this->connectPluginVersion = file_exists('/var/log/plugins/dynamix.unraid.net.plg')
? trim(@exec('/usr/local/sbin/plugin version /var/log/plugins/dynamix.unraid.net.plg 2>/dev/null'))
: (file_exists('/var/log/plugins/dynamix.unraid.net.staging.plg')
? trim(@exec('/usr/local/sbin/plugin version /var/log/plugins/dynamix.unraid.net.staging.plg 2>/dev/null'))
: 'base-' . $this->var['version']);
// Get version directly using api_utils.sh get_api_version function
$this->connectPluginVersion = trim(@exec('/usr/local/share/dynamix.unraid.net/scripts/api_utils.sh get_api_version 2>/dev/null')) ?: 'unknown';
$this->getMyServersCfgValues();
$this->getConnectKnownOrigins();

View File

@@ -1,87 +0,0 @@
#!/usr/bin/php
<?php
// Script Name: cleanup_operations.php
// Purpose: Handles cleanup operations for both install on unsupported OS and during removal
//
// Usage:
//   ./cleanup_operations.php
//   ./cleanup_operations.php --debug
// Parse command line arguments
$debug = false;
if (isset($argv)) {
foreach ($argv as $arg) {
if ($arg === '--debug') {
$debug = true;
}
}
}
// Debug function: prints only when --debug was passed on the command line
function debug_log($message) {
global $debug;
if ($debug) {
echo "[DEBUG] [cleanup_operations]: $message\n";
}
}
// Get Unraid version and myservers config
// (@ suppresses warnings: either file may be absent, which is handled below)
$ver = @parse_ini_file('/etc/unraid-version', true)['version'];
$msini = @parse_ini_file('/boot/config/plugins/dynamix.my.servers/myservers.cfg', true);
debug_log("Unraid version: $ver");
debug_log("myservers.cfg exists: " . ($msini !== false ? "Yes" : "No"));
echo "\n";
echo "**********************************\n";
echo "🧹 CLEANING UP - may take a minute\n";
echo "**********************************\n";
// file_exists() is true for directories too, so this matches the /boot/.git
// repo directory created by flash backup
if (file_exists("/boot/.git")) {
if (file_exists("/etc/rc.d/rc.flash_backup")) {
# stop flash backup service
echo "\nStopping flash backup service. Please wait…";
exec("/etc/rc.d/rc.flash_backup stop &>/dev/null");
}
if (file_exists("/usr/local/emhttp/plugins/dynamix.my.servers/include/UpdateFlashBackup.php")) {
# deactivate and delete local flash backup
echo "\nDeactivating flash backup. Please wait…";
passthru("/usr/bin/php /usr/local/emhttp/plugins/dynamix.my.servers/include/UpdateFlashBackup.php deactivate");
}
}
# set "Allow Remote Access" to "No" and sign out from Unraid Connect
if ($msini !== false) {
# stop unraid-api
echo "\nStopping unraid-api. Please wait…";
$output = shell_exec("/etc/rc.d/rc.unraid-api stop --delete 2>&1");
if (!$output) {
echo "Waiting for unraid-api to stop...\n";
sleep(5); // Give it a few seconds to fully stop
}
echo "Stopped unraid-api: $output";
if (!empty($msini['remote']['username'])) {
// Only a signed-in server needs to be unregistered with the key server
$var = parse_ini_file("/var/local/emhttp/var.ini");
$keyfile = @file_get_contents($var['regFILE']);
if ($keyfile !== false) {
echo "\nSigning out of Unraid Connect\n";
$ch = curl_init('https://keys.lime-technology.com/account/server/unregister');
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_POST, 1);
curl_setopt($ch, CURLOPT_POSTFIELDS, ['keyfile' => @base64_encode($keyfile)]);
curl_exec($ch);
curl_close($ch);
}
}
# remove myservers.cfg
unlink('/boot/config/plugins/dynamix.my.servers/myservers.cfg');
# reload nginx to disable Remote Access
echo "\n⚠️ Reloading Web Server. If this window stops updating for two minutes please close it.\n";
exec("/etc/rc.d/rc.nginx reload &>/dev/null");
}
exit(0);

View File

@@ -0,0 +1,134 @@
#!/bin/sh
# Script to handle cleanup operations during removal
# Get the operation mode
# Defaults to "cleanup", the only supported mode; anything else is rejected
# with a usage message further down.
MODE="${1:-cleanup}"
# Handle flash backup deactivation and Connect signout
# Deactivates any configured flash backup, signs the server out of Unraid
# Connect, removes myservers.cfg, and reloads nginx to drop Remote Access.
perform_connect_cleanup() {
    printf "\n**********************************\n"
    printf "🧹 CLEANING UP - may take a minute\n"
    printf "**********************************\n"
    # Handle git-based flash backups
    # fixed: /boot/.git is a directory for a git repo, so test with -e
    # (exists) rather than -f (regular file only) — this mirrors the PHP
    # file_exists() check this script replaced, which matched directories
    if [ -e "/boot/.git" ]; then
        if [ -f "/etc/rc.d/rc.flash_backup" ]; then
            printf "\nStopping flash backup service. Please wait...\n"
            /etc/rc.d/rc.flash_backup stop >/dev/null 2>&1
        fi
        if [ -f "/usr/local/emhttp/plugins/dynamix.my.servers/include/UpdateFlashBackup.php" ]; then
            printf "\nDeactivating flash backup. Please wait...\n"
            /usr/bin/php /usr/local/emhttp/plugins/dynamix.my.servers/include/UpdateFlashBackup.php deactivate
        fi
    fi
    # Check if myservers.cfg exists
    if [ -f "/boot/config/plugins/dynamix.my.servers/myservers.cfg" ]; then
        # Stop unraid-api
        printf "\nStopping unraid-api. Please wait...\n"
        output=$(/etc/rc.d/rc.unraid-api stop --delete 2>&1)
        if [ -z "$output" ]; then
            echo "Waiting for unraid-api to stop..."
            sleep 5 # Give it time to stop
        fi
        echo "Stopped unraid-api: $output"
        # Sign out of Unraid Connect (we'll use curl directly from shell)
        # We need to extract the username from myservers.cfg and the registration key
        if grep -q 'username' "/boot/config/plugins/dynamix.my.servers/myservers.cfg"; then
            printf "\nSigning out of Unraid Connect\n"
            # Check if regFILE exists in var.ini
            if [ -f "/var/local/emhttp/var.ini" ]; then
                # fixed: ini values are quoted (regFILE="/boot/..."); strip the
                # quotes after cut, otherwise the -f test below never matches
                regfile=$(grep "regFILE" "/var/local/emhttp/var.ini" | cut -d= -f2 | tr -d '"')
                if [ -n "$regfile" ] && [ -f "$regfile" ]; then
                    # Base64 encode the key file and send to server
                    encoded_key=$(base64 "$regfile" | tr -d '\n')
                    if [ -n "$encoded_key" ]; then
                        curl -s -X POST "https://keys.lime-technology.com/account/server/unregister" \
                            -d "keyfile=$encoded_key" >/dev/null 2>&1
                    fi
                fi
            fi
        fi
        # Remove myservers.cfg
        rm -f /boot/config/plugins/dynamix.my.servers/myservers.cfg
        # Reload nginx to disable Remote Access
        printf "\n⚠ Reloading Web Server. If this window stops updating for two minutes please close it.\n"
        /etc/rc.d/rc.nginx reload >/dev/null 2>&1
    fi
}
# Full cleanup function - runs only during removal
# Stops all related services/processes, then deletes every file, symlink,
# and state artifact this plugin (and its legacy versions) may have created.
perform_full_cleanup() {
# Clean up Connect and Flash Backup services
perform_connect_cleanup
# Stop and remove the API
if [ -e /etc/rc.d/rc.unraid-api ]; then
# Stop flash backup
/etc/rc.d/rc.flash_backup stop >/dev/null 2>&1
# Stop the api gracefully
/etc/rc.d/rc.unraid-api stop >/dev/null 2>&1
# Stop newer clients
unraid-api stop
# Kill any processes
# ($pid_list is intentionally unquoted: it may hold multiple PIDs)
pid_list=$(pidof unraid-api 2>/dev/null) || true
[ -n "$pid_list" ] && kill -9 $pid_list
# Find all PIDs referencing main.js and kill them
node_pids=$(pgrep -f "node /usr/local/unraid-api/dist/main.js" 2>/dev/null) || true
[ -n "$node_pids" ] && echo "$node_pids" | xargs kill -9
# Clean up files
rm -rf /usr/local/unraid-api
rm -rf /var/run/unraid-api.sock
rm -rf /usr/.pnpm-store
fi
# Delete plugin files and cleanup
rm -f /boot/config/plugins/dynamix.my.servers/.gitignore
rm -f /etc/rc.d/rc.unraid-api
rm -f /etc/rc.d/rc.flash_backup
rm -rf /usr/local/sbin/unraid-api
rm -rf /usr/local/bin/unraid-api
rm -rf /usr/local/emhttp/plugins/dynamix.unraid.net
rm -rf /usr/local/emhttp/plugins/dynamix.unraid.net.staging
rm -f /etc/rc.d/rc6.d/K10_flash_backup
rm -f /var/log/gitcount
rm -f /var/log/gitflash
rm -f /var/log/gitratelimit
rm -f /usr/local/emhttp/state/flashbackup.ini
rm -f /usr/local/emhttp/state/myservers.cfg
# Delete any legacy files that may exist
rm -rf /boot/config/plugins/dynamix.my.servers/libvirt.node
rm -rf /boot/config/plugins/dynamix.my.servers/segfault-handler.node
rm -rf /boot/config/plugins/dynamix.my.servers/wc
rm -f /boot/config/plugins/Unraid.net/unraid-api.tgz
rm -f /boot/config/plugins/Unraid.net/.gitignore
rm -f /boot/config/plugins/dynamix.my.servers/unraid-api.tgz
rm -rf /boot/config/plugins/Unraid.net/webComps
rm -rf /boot/config/plugins/Unraid.net/wc
rm -f /usr/local/emhttp/webGui/javascript/vue.js
rm -f /usr/local/emhttp/webGui/javascript/vue.min.js
rm -rf /usr/local/emhttp/webGui/webComps
rm -rf /usr/local/emhttp/webGui/wc
# Clean up our optional makestate modifications in rc.nginx (on 6.9 and 6.10.0-rc[12])
sed -i '/scripts\/makestate/d' /etc/rc.d/rc.nginx
# Clean up extra origin for robots.txt
sed -i '/#robots.txt any origin/d' /etc/rc.d/rc.nginx
}
# Main execution flow based on mode
# Only "cleanup" is supported; any other mode prints usage and exits 1.
if [ "$MODE" = "cleanup" ]; then
    perform_full_cleanup
else
    echo "Unknown mode: $MODE"
    echo "Usage: $0 [cleanup]"
    exit 1
fi

View File

@@ -0,0 +1,74 @@
#!/bin/sh
# Script to handle file patches
# Patches nginx/webgui files so the webgui can be iframed by Connect,
# fixes iframe-hostile pages, relaxes built-in Firefox restrictions, and
# migrates legacy flash-drive settings. Reloads nginx only when changed.
# Patch nginx config if needed
NGINX_CHANGED=0
FILE=/etc/nginx/nginx.conf
if grep -q "SAMEORIGIN" "${FILE}" >/dev/null 2>&1; then
cp -p "$FILE" "$FILE-"
OLD="add_header X-Frame-Options 'SAMEORIGIN';"
NEW="add_header Content-Security-Policy \"frame-ancestors 'self' https://connect.myunraid.net/\";"
sed -i "s|${OLD}|${NEW}|" "${FILE}"
NGINX_CHANGED=1
fi
# Patch robots.txt handling
FILE=/etc/rc.d/rc.nginx
if ! grep -q "#robots.txt any origin" "${FILE}" >/dev/null 2>&1; then
cp -p "$FILE" "$FILE-"
FIND="location = \/robots.txt {"
# escape tabs and spaces
ADD="\t add_header Access-Control-Allow-Origin *; #robots.txt any origin"
# shell-safe: pass ADD via printf to preserve escapes
# NOTE(review): command substitution strips the trailing newline, so this
# is equivalent to appending ${ADD} directly — confirm the escapes survive
# sed's `a` command as intended.
sed -i "/${FIND}/a $(printf '%s\n' "${ADD}")" "${FILE}"
NGINX_CHANGED=1
fi
# Remove keys.limetechnology.com from hosts file
if grep -q "keys.lime-technology.com" /etc/hosts >/dev/null 2>&1; then
sed -i "/keys.lime-technology.com/d" /etc/hosts >/dev/null 2>&1
fi
# Fix update.htm to work in an iframe
FILE=/usr/local/emhttp/update.htm
if [ -f "${FILE}" ] && grep -q "top.document" "${FILE}" >/dev/null 2>&1; then
cp -p "$FILE" "$FILE-"
sed -i 's|top.document|parent.document|gm' "${FILE}"
fi
# Fix logging.htm to work in an iframe
FILE=/usr/local/emhttp/logging.htm
if [ -f "${FILE}" ] && grep -q "top.Shadowbox" "${FILE}" >/dev/null 2>&1; then
cp -p "$FILE" "$FILE-"
sed -i 's|top.Shadowbox|parent.Shadowbox|gm' "${FILE}"
fi
# Relax restrictions on built-in Firefox
FIREFOX_DIR=/usr/share/mozilla/firefox
# Find the default profile directory (may change in future versions)
PROFILE_DIR=$(find "$FIREFOX_DIR" -name "*.default" -type d 2>/dev/null | head -n 1)
if [ -z "$PROFILE_DIR" ]; then
echo "Firefox default profile directory not found, skipping Firefox configuration"
else
FILE="$PROFILE_DIR/user.js"
if [ -f "$FILE" ]; then
cp -p "$FILE" "$FILE-"
# Append settings if they don't exist
grep -q "privacy.firstparty.isolate" "$FILE" || echo 'user_pref("privacy.firstparty.isolate", false);' >> "$FILE"
grep -q "javascript.options.asmjs" "$FILE" || echo 'user_pref("javascript.options.asmjs", true);' >> "$FILE"
grep -q "javascript.options.wasm" "$FILE" || echo 'user_pref("javascript.options.wasm", true);' >> "$FILE"
echo "Updated Firefox preferences in $FILE"
fi
fi
# Move settings on flash drive
# (migrate legacy Unraid.net config dir to the new location, once)
CFG_OLD=/boot/config/plugins/Unraid.net
CFG_NEW=/boot/config/plugins/dynamix.my.servers
[ -d "$CFG_OLD" ] && [ ! -d "$CFG_NEW" ] && mv "$CFG_OLD" "$CFG_NEW"
# Reload nginx if needed
if [ "$NGINX_CHANGED" = "1" ] && /etc/rc.d/rc.nginx status >/dev/null 2>&1; then
echo "Reloading web server to apply changes"
/etc/rc.d/rc.nginx reload >/dev/null 2>&1
fi

View File

@@ -0,0 +1,100 @@
#!/bin/bash
# Script to handle API setup: environment file, CLI symlinks, dependency
# restore, and shutdown-script permissions for the Unraid API package.
# Setup environment
CONFIG_DIR="/boot/config/plugins/dynamix.my.servers"
API_BASE_DIR="/usr/local/unraid-api"
UNRAID_BINARY_PATH="/usr/local/bin/unraid-api"
echo "Starting API setup script"
echo "Environment: CONFIG_DIR=$CONFIG_DIR, API_BASE_DIR=$API_BASE_DIR"
echo "UNRAID_BINARY_PATH=$UNRAID_BINARY_PATH"
# Set up environment file on the flash drive (persists across reboots)
if [ ! -f "${CONFIG_DIR}/env" ]; then
    echo "Creating env file at ${CONFIG_DIR}/env"
    mkdir -p "${CONFIG_DIR}"
    echo "env=\"production\"" > "${CONFIG_DIR}/env"
else
    echo "Env file already exists"
fi
# Create log directory (PM2 will not start without it)
mkdir -p /var/log/unraid-api
echo "Created log directory at /var/log/unraid-api"
# Create symlinks for the Unraid API.
# Fix: use the UNRAID_BINARY_PATH variable declared above instead of
# repeating the literal path — it was previously defined but never used.
if [ -f "${API_BASE_DIR}/dist/cli.js" ]; then
    echo "Creating symlinks for unraid-api"
    ln -sf "${API_BASE_DIR}/dist/cli.js" "${UNRAID_BINARY_PATH}"
    ln -sf "${UNRAID_BINARY_PATH}" "/usr/local/sbin/unraid-api"
    ln -sf "${UNRAID_BINARY_PATH}" "/usr/bin/unraid-api"
    # Verify symlinks were created
    if [ -L "${UNRAID_BINARY_PATH}" ]; then
        echo "Symlinks created successfully"
    else
        echo "ERROR: Failed to create symlinks"
    fi
    # Make API scripts executable; guard main.js so a missing file does
    # not emit a spurious chmod error.
    echo "Making API scripts executable"
    chmod +x "${API_BASE_DIR}/dist/cli.js"
    [ -f "${API_BASE_DIR}/dist/main.js" ] && chmod +x "${API_BASE_DIR}/dist/main.js"
    echo "API scripts are now executable"
else
    echo "ERROR: Source file ${API_BASE_DIR}/dist/cli.js does not exist"
    # Dump directory listings to aid debugging a broken package install
    if [ -d "${API_BASE_DIR}" ]; then
        echo "API base directory exists"
        ls -la "${API_BASE_DIR}"
        if [ -d "${API_BASE_DIR}/dist" ]; then
            echo "Dist directory exists"
            ls -la "${API_BASE_DIR}/dist"
        else
            echo "Dist directory does not exist"
        fi
    else
        echo "API base directory does not exist"
    fi
fi
# Copy the packaged production env file into place for the API runtime
if [ -f "${API_BASE_DIR}/.env.production" ]; then
    echo "Copying .env.production to .env"
    cp "${API_BASE_DIR}/.env.production" "${API_BASE_DIR}/.env"
else
    echo "ERROR: .env.production file not found"
fi
# Restore dependencies using vendor archive from package
if [ -x "/etc/rc.d/rc.unraid-api" ]; then
    echo "Restoring dependencies using auto-detection"
    if /etc/rc.d/rc.unraid-api ensure; then
        echo "Dependencies restored successfully"
    else
        echo "ERROR: Failed to restore dependencies" >&2
        exit 1
    fi
else
    echo "Dependencies not restored: rc.unraid-api executable not found"
fi
# Ensure rc directories exist and shutdown scripts are executable
echo "Ensuring shutdown scripts are executable"
if [ -d "/etc/rc.d/rc6.d" ]; then
    chmod 755 /etc/rc.d/rc6.d/K*unraid-api 2>/dev/null
    chmod 755 /etc/rc.d/rc6.d/K*flash-backup 2>/dev/null
else
    echo "Warning: rc6.d directory does not exist"
fi
# Point rc0.d at rc6.d so halt and reboot run the same kill scripts
if [ ! -L /etc/rc.d/rc0.d ] && [ ! -d /etc/rc.d/rc0.d ]; then
    echo "Creating symlink from /etc/rc.d/rc0.d to /etc/rc.d/rc6.d"
    ln -s /etc/rc.d/rc6.d /etc/rc.d/rc0.d
fi
echo "API setup completed at $(date)"

View File

@@ -0,0 +1,180 @@
#!/bin/sh
# Unraid API Installation Verification Script
# Checks that critical files, directories, and symlinks are installed
# correctly and prints a per-category error summary.
# Exit on errors (individual checks run inside if/!, so a failing
# check does not abort the whole script)
set -e
echo "Performing comprehensive installation verification..."
# Critical executables, newline-separated (POSIX sh has no arrays;
# entries are word-split, so paths must not contain whitespace)
CRITICAL_FILES="/usr/local/bin/unraid-api
/etc/rc.d/rc.unraid-api
/usr/local/emhttp/plugins/dynamix.my.servers/scripts/gitflash_log
/usr/local/share/dynamix.unraid.net/install/scripts/cleanup.sh
/usr/local/share/dynamix.unraid.net/install/scripts/file_patches.sh
/usr/local/share/dynamix.unraid.net/install/scripts/setup_api.sh"
# Critical directories, same newline-separated convention
CRITICAL_DIRS="/usr/local/unraid-api
/var/log/unraid-api
/usr/local/emhttp/plugins/dynamix.my.servers
/usr/local/emhttp/plugins/dynamix.unraid.net
/etc/rc.d/rc6.d
/etc/rc.d/rc0.d"
# Critical symlinks (the unraid-api CLI entry points)
CRITICAL_SYMLINKS="/usr/local/bin/unraid-api
/usr/local/sbin/unraid-api
/usr/bin/unraid-api"
# Running total of problems found across all categories
TOTAL_ERRORS=0
# Report on an expected executable at $1.
# Return codes: 0 = exists and is executable, 1 = exists but not
# executable, 2 = missing entirely.
check_executable() {
    if [ -x "$1" ]; then
        printf '✓ Executable file %s exists and is executable\n' "$1"
        return 0
    fi
    if [ -f "$1" ]; then
        printf '⚠ File %s exists but is not executable\n' "$1"
        return 1
    fi
    printf '✗ Executable file %s is missing\n' "$1"
    return 2
}
# Report whether the directory $1 is present.
# Returns 0 when it exists, 1 when it is missing.
check_dir() {
    if [ ! -d "$1" ]; then
        printf '✗ Directory %s is missing\n' "$1"
        return 1
    fi
    printf '✓ Directory %s exists\n' "$1"
    return 0
}
# Report whether $1 is a symlink, printing its target when it is.
# Returns 0 when the symlink exists, 1 otherwise.
check_symlink() {
    if [ ! -L "$1" ]; then
        printf '✗ Symlink %s is missing\n' "$1"
        return 1
    fi
    printf '✓ Symlink %s exists -> %s\n' "$1" "$(readlink "$1")"
    return 0
}
# --- Executable file checks -------------------------------------------
echo "Checking executable files..."
EXEC_ERRORS=0
# Unquoted expansion is deliberate: the newline-separated list is split
# into individual paths here.
for file in $CRITICAL_FILES; do
if ! check_executable "$file"; then
EXEC_ERRORS=$((EXEC_ERRORS + 1))
fi
done
TOTAL_ERRORS=$((TOTAL_ERRORS + EXEC_ERRORS))
# --- Directory checks -------------------------------------------------
echo "Checking directories..."
DIR_ERRORS=0
for dir in $CRITICAL_DIRS; do
if ! check_dir "$dir"; then
DIR_ERRORS=$((DIR_ERRORS + 1))
fi
done
TOTAL_ERRORS=$((TOTAL_ERRORS + DIR_ERRORS))
# --- Symlink checks ---------------------------------------------------
echo "Checking symlinks..."
SYMLINK_ERRORS=0
for link in $CRITICAL_SYMLINKS; do
if ! check_symlink "$link"; then
SYMLINK_ERRORS=$((SYMLINK_ERRORS + 1))
fi
done
TOTAL_ERRORS=$((TOTAL_ERRORS + SYMLINK_ERRORS))
# --- Configuration file check -----------------------------------------
ENV_FILE="/boot/config/plugins/dynamix.my.servers/env"
echo "Checking configuration files..."
CONFIG_ERRORS=0
if [ -f "$ENV_FILE" ]; then
printf '✓ Environment file %s exists\n' "$ENV_FILE"
else
printf '✗ Environment file %s is missing\n' "$ENV_FILE"
CONFIG_ERRORS=$((CONFIG_ERRORS + 1))
fi
TOTAL_ERRORS=$((TOTAL_ERRORS + CONFIG_ERRORS))
# --- Slackware-style shutdown configuration ---------------------------
echo "Checking shutdown configuration..."
SHUTDOWN_ERRORS=0
# Check for package-provided shutdown (kill) scripts in rc6.d
echo "Checking for shutdown scripts in rc6.d..."
# NOTE(review): K10/K20 prefixes presumably order flash-backup before
# unraid-api at shutdown — confirm against the rc runlevel handling.
if [ -x "/etc/rc.d/rc6.d/K10flash-backup" ]; then
printf '✓ Shutdown script for flash-backup exists and is executable\n'
else
printf '✗ Shutdown script for flash-backup missing or not executable\n'
SHUTDOWN_ERRORS=$((SHUTDOWN_ERRORS + 1))
fi
# Check for unraid-api shutdown script
if [ -x "/etc/rc.d/rc6.d/K20unraid-api" ]; then
printf '✓ Shutdown script for unraid-api exists and is executable\n'
else
printf '✗ Shutdown script for unraid-api missing or not executable\n'
SHUTDOWN_ERRORS=$((SHUTDOWN_ERRORS + 1))
fi
# rc0.d may be either a symlink to rc6.d (created by setup_api.sh) or a
# real directory; either form satisfies the check
if [ -L "/etc/rc.d/rc0.d" ]; then
printf '✓ rc0.d symlink exists\n'
elif [ -d "/etc/rc.d/rc0.d" ]; then
printf '✓ rc0.d directory exists\n'
else
printf '✗ rc0.d symlink or directory missing\n'
SHUTDOWN_ERRORS=$((SHUTDOWN_ERRORS + 1))
fi
TOTAL_ERRORS=$((TOTAL_ERRORS + SHUTDOWN_ERRORS))
# Check if unraid-api is resolvable via PATH
if command -v unraid-api >/dev/null 2>&1; then
printf '✓ unraid-api is in PATH\n'
else
printf '⚠ unraid-api is not in PATH\n'
TOTAL_ERRORS=$((TOTAL_ERRORS + 1))
fi
# Log file check (informational only — does not affect the error count)
if [ -f "/var/log/unraid-api/dynamix-unraid-install.log" ]; then
printf '✓ Installation log file exists\n'
else
printf '⚠ Installation log file not found\n'
fi
# --- Summary ----------------------------------------------------------
echo ""
echo "Verification summary:"
echo "- Executable files errors: $EXEC_ERRORS"
echo "- Directory errors: $DIR_ERRORS"
echo "- Symlink errors: $SYMLINK_ERRORS"
echo "- Configuration errors: $CONFIG_ERRORS"
echo "- Shutdown configuration errors: $SHUTDOWN_ERRORS"
echo "- Total errors: $TOTAL_ERRORS"
if [ $TOTAL_ERRORS -eq 0 ]; then
printf 'All checks passed successfully.\n'
echo "Installation verification completed successfully."
exit 0
else
printf 'Found %d total errors.\n' "$TOTAL_ERRORS"
echo "Installation verification completed with issues."
echo "See log file for details: /var/log/unraid-api/dynamix-unraid-install.log"
# Exit 0 deliberately: verification reports problems but must never
# fail the package installation itself.
exit 0
fi

View File

@@ -0,0 +1,70 @@
#!/bin/bash
# API Utilities
# Shared functions for API version detection and vendor archive
# configuration, sourced by the dependency management scripts.
# Path of the JSON config describing the vendor archive (api_version,
# vendor_store_url, vendor_store_path keys, parsed with jq below)
CONFIG_FILE="/usr/local/share/dynamix.unraid.net/config/vendor_archive.json"
# Print the API version recorded in the vendor archive config file.
# Returns 0 and echoes the version, or 1 when it cannot be determined
# (missing config, missing jq, or an empty/null value).
get_api_version() {
    local version=""

    # Guard clauses: both the config file and jq must be available.
    [ -f "$CONFIG_FILE" ] || return 1
    command -v jq >/dev/null 2>&1 || return 1

    version=$(jq -r '.api_version' "$CONFIG_FILE")
    # jq prints the literal string "null" for a missing key.
    if [ -z "$version" ] || [ "$version" = "null" ]; then
        return 1
    fi
    echo "$version"
    return 0
}
# Emit the vendor archive configuration, one value per line:
#   api_version, vendor_store_url, vendor_store_path
# Returns 0 when all three are present; 1 otherwise (details on stderr).
get_archive_information() {
    local api_version=""
    local vendor_store_url=""
    local vendor_store_path=""
    local name=""
    local value=""

    if [ ! -f "$CONFIG_FILE" ]; then
        echo "Vendor archive config file not found at $CONFIG_FILE" >&2
        return 1
    fi
    if ! command -v jq >/dev/null 2>&1; then
        echo "jq not found, can't parse config file" >&2
        return 1
    fi

    api_version=$(jq -r '.api_version' "$CONFIG_FILE")
    vendor_store_url=$(jq -r '.vendor_store_url' "$CONFIG_FILE")
    vendor_store_path=$(jq -r '.vendor_store_path' "$CONFIG_FILE")

    # Reject any value that is empty or the literal "null" jq emits for
    # a missing key; checked in the same order as the variables above.
    for name in api_version vendor_store_url vendor_store_path; do
        eval "value=\$$name"
        if [ -z "$value" ] || [ "$value" = "null" ]; then
            echo "Invalid or missing $name in config file" >&2
            return 1
        fi
    done

    printf '%s\n' "$api_version" "$vendor_store_url" "$vendor_store_path"
    return 0
}

View File

@@ -0,0 +1,245 @@
#!/bin/bash
# Dependency management functions
# Manages the Node.js vendor archive (a node_modules tarball):
# restore, archive, ensure, and redownload operations.
# Source shared utilities (get_api_version / get_archive_information)
# relative to this script's own on-disk location.
SCRIPT_DIR="$(dirname "${BASH_SOURCE[0]}")"
# shellcheck source=./api_utils.sh
source "${SCRIPT_DIR}/api_utils.sh"
# Location where the API's Node.js dependencies are installed
DEPENDENCIES_DIR="/usr/local/unraid-api/node_modules"
# Download the vendor archive described by the shared config file.
# Args:
#   $1 - legacy archive path argument (ignored, kept for backward compatibility)
# Stdout: ONLY the downloaded archive path, so callers can capture it
#         with command substitution. All log lines go to stderr.
# Returns 0 on success, 1 when the config lookup or download fails.
redownload_vendor_archive() {
    # Define all local variables at the top
    local info_raw
    local -a info
    local api_version=""
    local vendor_store_url=""
    local vendor_store_path=""

    # Fix: capture via command substitution and test THAT. The previous
    # `if ! mapfile -t info < <(get_archive_information)` could never
    # detect failure, because mapfile's exit status ignores failures
    # inside a process substitution.
    if ! info_raw=$(get_archive_information); then
        echo "Error: Failed to get vendor archive information. Cannot proceed." >&2
        return 1
    fi
    mapfile -t info <<< "$info_raw"
    api_version="${info[0]}"
    vendor_store_url="${info[1]}"
    vendor_store_path="${info[2]}"

    # Fix: progress messages go to stderr; they previously went to
    # stdout and polluted the path captured by callers.
    echo "Attempting to download vendor archive for version $api_version" >&2
    # Create directory if it doesn't exist
    mkdir -p "$(dirname "$vendor_store_path")"
    echo "Downloading vendor archive from $vendor_store_url to $vendor_store_path" >&2
    if curl -f -L "$vendor_store_url" -o "$vendor_store_path"; then
        echo "Successfully downloaded vendor archive to $vendor_store_path" >&2
        # The single stdout line: the path for the caller to capture.
        echo "$vendor_store_path"
        return 0
    fi
    echo "Failed to download vendor archive from URL" >&2
    return 1
}
# Locate the vendor archive on disk, downloading it when absent.
# Stdout: ONLY the archive path on success (log lines go to stderr).
# Returns 0 when a usable archive path was emitted, 1 otherwise.
ensure_vendor_archive() {
    # Define all local variables at the top
    local info_raw
    local -a info
    local api_version=""
    local vendor_store_url=""
    local vendor_store_path=""
    # Fix: previously an undeclared global; keep it function-scoped.
    local downloaded_archive=""

    # Fix: mapfile's exit status does not propagate failures from a
    # process substitution, so capture the output first and check the
    # command substitution's status instead.
    if ! info_raw=$(get_archive_information); then
        echo "Error: Failed to get vendor archive information. Cannot proceed." >&2
        return 1
    fi
    mapfile -t info <<< "$info_raw"
    api_version="${info[0]}"
    vendor_store_url="${info[1]}"
    vendor_store_path="${info[2]}"

    echo "Looking for vendor archive at $vendor_store_path" >&2
    # Check if the expected archive exists
    if [ -f "$vendor_store_path" ]; then
        echo "$vendor_store_path"
        return 0
    fi

    # Expected archive is missing, attempt to download
    echo "Expected vendor archive missing at $vendor_store_path. Attempting to download..." >&2
    downloaded_archive=$(redownload_vendor_archive)
    if [ -n "$downloaded_archive" ] && [ -f "$downloaded_archive" ]; then
        echo "$downloaded_archive"
        return 0
    fi

    # No vendor archive available
    echo "No vendor archive available" >&2
    return 1
}
# Restores the node_modules directory from a backup file.
# Args:
#   $1 - Path to the backup file (tar.xz format)
# Returns:
#   0 on success (or when the backup is absent — a deliberate soft skip),
#   1 on insufficient disk space or extraction failure
# Note: Requires 1.5x the backup size in free space for safe extraction.
restore_dependencies() {
    # Fix: these were undeclared globals, inconsistent with the local
    # convention used by the other functions in this script.
    local backup_file="$1"
    local backup_size dest_space required_space

    # Check if backup file exists
    if [ ! -f "$backup_file" ]; then
        echo "Backup file not found at '$backup_file'. Skipping restore."
        return 0
    fi

    # Check available disk space in the destination; df reports KiB,
    # so convert to bytes before comparing against the archive size.
    backup_size=$(stat -c%s "$backup_file")
    dest_space=$(df --output=avail "$(dirname "$DEPENDENCIES_DIR")" | tail -n1)
    dest_space=$((dest_space * 1024)) # Convert KB to bytes
    # Require 1.5x the backup size for safe extraction
    required_space=$((backup_size + (backup_size / 2)))
    if [ "$dest_space" -lt "$required_space" ]; then
        echo "Error: Insufficient disk space in destination. Need at least $((required_space / 1024 / 1024))MB, have $((dest_space / 1024 / 1024))MB"
        return 1
    fi

    echo "Restoring node_modules from '$backup_file' to '$DEPENDENCIES_DIR'"
    # Replace any existing tree wholesale and ensure its parent exists;
    # a partial extraction is removed on failure so a broken
    # node_modules never lingers.
    rm -rf "$DEPENDENCIES_DIR"
    mkdir -p "$(dirname "$DEPENDENCIES_DIR")"
    if ! tar -xJf "$backup_file" -C "$(dirname "$DEPENDENCIES_DIR")" --preserve-permissions; then
        echo "Error: Failed to extract backup to final location."
        rm -rf "$DEPENDENCIES_DIR"
        return 1
    fi

    # Set ownership to root (0:0)
    chown -R 0:0 "$DEPENDENCIES_DIR"
    echo "node_modules restored successfully."
    return 0
}
# Archives the node_modules directory to the vendor archive path taken
# from the shared config file.
# Returns:
#   0 on success, 1 on failure (missing config, missing source dir,
#   or tar error)
archive_dependencies() {
    # Define all local variables at the top
    local info_raw
    local -a info
    local api_version=""
    local vendor_store_url=""
    local vendor_store_path=""
    local source_dir="$DEPENDENCIES_DIR"
    local archive_file=""

    # Fix: capture first, then mapfile. Checking mapfile's own status
    # against a process substitution (the previous approach) can never
    # detect a failure of get_archive_information.
    if ! info_raw=$(get_archive_information); then
        echo "Error: Failed to get vendor archive information. Cannot proceed." >&2
        return 1
    fi
    mapfile -t info <<< "$info_raw"
    api_version="${info[0]}"
    vendor_store_url="${info[1]}"
    vendor_store_path="${info[2]}"
    archive_file="$vendor_store_path"

    # Check if source directory exists
    if [ ! -d "$source_dir" ]; then
        echo "Error: Source node_modules directory '$source_dir' does not exist."
        return 1
    fi

    # Create destination directory if it doesn't exist
    mkdir -p "$(dirname "$archive_file")"
    echo "Archiving node_modules from '$source_dir' to '$archive_file'"
    # XZ level 5 balances size and speed; -C plus basename keeps the
    # archived paths relative, and tar preserves symlinks by default.
    if XZ_OPT=-5 tar -cJf "$archive_file" -C "$(dirname "$source_dir")" "$(basename "$source_dir")"; then
        echo "node_modules archive created successfully."
        return 0
    fi
    echo "Error: Failed to create node_modules archive."
    return 1
}
# Ensure dependencies are installed, restoring from an archive if needed.
# Args:
#   $1 - optional explicit archive path; auto-detection is used otherwise
# Returns 0 when dependencies are present or restored, 1 otherwise.
ensure() {
    # Fix: previously an undeclared global; keep it function-scoped.
    local vendor_archive=""

    # Fast path: a non-empty node_modules means nothing to do.
    if [ -d "$DEPENDENCIES_DIR" ] && [ "$(ls -A "$DEPENDENCIES_DIR" 2>/dev/null)" ]; then
        echo "Dependencies directory already exists and is populated."
        return 0
    fi

    # If an explicit archive path is provided and exists, use it.
    if [ -n "$1" ] && [ -f "$1" ]; then
        echo "Using provided vendor archive: $1"
        restore_dependencies "$1"
        return $?
    fi

    # Otherwise locate (or download) the vendor archive automatically.
    vendor_archive=$(ensure_vendor_archive)
    if [ -n "$vendor_archive" ] && [ -f "$vendor_archive" ]; then
        echo "Found vendor archive at $vendor_archive"
        restore_dependencies "$vendor_archive"
        return $?
    fi
    echo "No vendor archive available. Cannot restore dependencies."
    return 1
}
# Main command dispatch: this script doubles as a small CLI.
case "$1" in
'restore')
# restore <archive_path>: unpack an explicit archive into place
if [ -n "$2" ]; then
restore_dependencies "$2"
exit $?
else
echo "Usage: $0 restore <archive_path>"
exit 1
fi
;;
'archive')
# archive: pack node_modules into the configured vendor archive path
archive_dependencies
exit $?
;;
'ensure')
# ensure [archive_path]: restore only if node_modules is missing/empty
ensure "$2"
exit $?
;;
'redownload')
# redownload: fetch the vendor archive from the configured URL.
# The path argument is ignored but kept for backward compatibility.
if downloaded_archive=$(redownload_vendor_archive) && [ -n "$downloaded_archive" ]; then
echo "Downloaded archive to: $downloaded_archive"
exit 0
else
echo "Failed to download vendor archive"
exit 1
fi
;;
*)
echo "Usage: $0 {restore|archive|ensure|redownload}"
exit 1
;;
esac

View File

@@ -25,6 +25,7 @@
"build": "vite build",
"build:watch": "vite build -c vite.web-component.ts --mode production --watch",
"build:wc": "REM_PLUGIN=true vite build -c vite.web-component.ts --mode production",
"build:all": "vite build && vite build -c vite.web-component.ts --mode production",
"clean": "rimraf dist",
"type-check": "vue-tsc --noEmit",
"lint": "eslint src",