feat: begin building plugin with node instead of bash (#1120)

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit

- **New Features**
- Enhanced automated build and release processes with containerized
builds, improved caching, and refined artifact handling.
- Introduced new configuration options to strengthen versioning,
integrity checks, and pull request tracking.
	- Added a new Dockerfile for building the Node.js application.
- Added new environment variables for API versioning and validation
control.
	- Implemented comprehensive management of PM2 processes and state.
- Introduced a new GitHub Actions workflow for automating staging plugin
deployment upon pull request closure.
	- Updated logic for handling plugin installation and error feedback.
	- Added new asynchronous methods for managing PM2 processes.
	- Updated logging configurations for better control over log outputs.
	- Added Prettier configuration for consistent code formatting.
- Introduced a configuration to prevent the application from watching
for file changes.

- **Bug Fixes**
- Improved error handling and user feedback during the installation of
staging versions.

- **Documentation**
- Removed outdated introductory documentation to streamline project
information.

- **Chores**
- Updated deployment routines and validation steps to improve release
consistency and error handling.
- Simplified packaging and build scripts for smoother staging and
production workflows.
	- Excluded sensitive files from the Docker build context.
- Updated the `.gitignore` file to prevent unnecessary files from being
tracked.
- Adjusted the test timeout configuration for improved test reliability.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->

---------

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
This commit is contained in:
Eli Bosley
2025-02-06 12:32:41 -05:00
committed by GitHub
parent 321703e907
commit 4f5c367fdf
30 changed files with 3003 additions and 153 deletions
+317
View File
@@ -0,0 +1,317 @@
import { execSync } from "child_process";
import { cp, readFile, writeFile, mkdir, readdir } from "fs/promises";
import { basename, join } from "path";
import { createHash } from "node:crypto";
import { $, cd, dotenv } from "zx";
import { z } from "zod";
import conventionalChangelog from "conventional-changelog";
import { escape as escapeHtml } from "html-sloppy-escaper";
import { parse } from "semver";
import { existsSync } from "fs";
import { format as formatDate } from "date-fns";
// Schema for the environment variables this build script requires (.env).
const envSchema = z.object({
  // Version of the unraid-api artifact bundled with the plugin.
  API_VERSION: z.string().refine((v) => {
    // semver's parse() returns null for invalid versions.
    return parse(v) !== null;
  }, "Must be a valid semver version"),
  // SHA256 of the unraid-api tarball (64 lowercase hex chars).
  API_SHA256: z.string().regex(/^[a-f0-9]{64}$/),
  // Optional pull-request number; enables the "pr" preview build.
  PR: z
    .string()
    .optional()
    .refine((v) => !v || /^\d+$/.test(v), "Must be a valid PR number"),
  // When "true", skip validation of the TXZ source directory.
  SKIP_SOURCE_VALIDATION: z
    .string()
    .optional()
    .default("false")
    .refine((v) => v === "true" || v === "false", "Must be true or false"),
});
type Env = z.infer<typeof envSchema>;
// Parse and validate the .env file. No `as Env` cast here: casting the raw
// dotenv output to Env before parsing asserted exactly the thing the schema
// exists to prove. zod accepts unknown input and infers the Env result type.
const validatedEnv: Env = envSchema.parse(dotenv.config());
const pluginName = "dynamix.unraid.net" as const;
const startingDir = process.cwd();
// Download roots for the stable (production) and preview (staging/PR) channels.
const BASE_URLS = {
  STABLE: "https://stable.dl.unraid.net/unraid-api",
  PREVIEW: "https://preview.dl.unraid.net/unraid-api",
} as const;
// Ensure that git is available before doing any work.
try {
  await $`git log -1 --pretty=%B`;
} catch (err) {
  console.error(`Error: git not available: ${err}`);
  process.exit(1);
}
/**
 * Resets the deploy/ working tree: clears any previous build output and
 * recreates the directory skeleton used by the rest of the build.
 */
const createBuildDirectory = async () => {
  // execSync is synchronous, so `await` on it is misleading and does nothing.
  // A shell command is used (rather than fs.rm) so the `*` globs expand and
  // the parent directories themselves survive.
  execSync(`rm -rf deploy/pre-pack/*`);
  execSync(`rm -rf deploy/release/*`);
  execSync(`rm -rf deploy/test/*`);
  await mkdir("deploy/pre-pack", { recursive: true });
  await mkdir("deploy/release/plugins", { recursive: true });
  await mkdir("deploy/release/archive", { recursive: true });
  await mkdir("deploy/test", { recursive: true });
};
/**
 * Replaces the value of an XML `<!ENTITY name "value">` declaration.
 *
 * @param xmlString - Document text containing the entity declaration.
 * @param entityName - Name of the entity to update (assumed regex-safe; all
 *   callers in this script pass fixed identifiers).
 * @param newValue - Replacement value to place between the quotes.
 * @returns The updated XML string.
 * @throws Error if the entity declaration is not found.
 */
function updateEntityValue(
  xmlString: string,
  entityName: string,
  newValue: string
) {
  const regex = new RegExp(`<!ENTITY ${entityName} "[^"]*">`);
  if (regex.test(xmlString)) {
    // Use a function replacer so `$` sequences in newValue (e.g. "$&")
    // are inserted literally instead of being interpreted as
    // String.replace replacement patterns.
    return xmlString.replace(
      regex,
      () => `<!ENTITY ${entityName} "${newValue}">`
    );
  }
  throw new Error(`Entity ${entityName} not found in XML`);
}
/**
 * Ensures the TXZ source tree exists and actually contains built
 * webcomponents (anything beyond the placeholder .gitkeep file).
 */
const validateSourceDir = async () => {
  console.log("Validating TXZ source directory");
  const sourceDir = join(startingDir, "source");
  if (!existsSync(sourceDir)) {
    throw new Error(`Source directory ${sourceDir} does not exist`);
  }
  // Expected location of the built webcomponent assets:
  // source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components
  const webcomponentSegments = [
    "dynamix.unraid.net",
    "usr",
    "local",
    "emhttp",
    "plugins",
    "dynamix.my.servers",
    "unraid-components",
  ];
  const webcomponentDir = join(sourceDir, ...webcomponentSegments);
  if (!existsSync(webcomponentDir)) {
    throw new Error(`Webcomponent directory ${webcomponentDir} does not exist`);
  }
  // A directory holding only .gitkeep means nothing was built.
  const entries = await readdir(webcomponentDir);
  const onlyPlaceholder = entries.length === 1 && entries[0] === ".gitkeep";
  if (onlyPlaceholder) {
    throw new Error(`No webcomponents found in ${webcomponentDir}`);
  }
};
/**
 * Packages the plugin source tree into a Slackware-style .txz archive.
 *
 * Copies source/dynamix.unraid.net into deploy/pre-pack (minus build/dev
 * files), runs the bundled makepkg script from inside that directory,
 * verifies the result with explodepkg, and returns the archive name plus
 * its SHA256.
 *
 * @param version - Date-based plugin version used in the archive file name.
 * @returns The archive file name and its SHA256 hex digest; exits the
 *   process if the produced archive fails the explodepkg check.
 */
const buildTxz = async (
  version: string
): Promise<{
  txzName: string;
  txzSha256: string;
}> => {
  if (validatedEnv.SKIP_SOURCE_VALIDATION !== "true") {
    await validateSourceDir();
  }
  const txzName = `${pluginName}-${version}.txz`;
  const txzPath = join(startingDir, "deploy/release/archive", txzName);
  const prePackDir = join(startingDir, "deploy/pre-pack");
  // Copy all files from source to temp dir, excluding specific files
  await cp(join(startingDir, "source/dynamix.unraid.net"), prePackDir, {
    recursive: true,
    filter: (src) => {
      const filename = basename(src);
      return ![
        ".DS_Store",
        "pkg_build.sh",
        "makepkg",
        "explodepkg",
        "sftp-config.json",
        ".gitkeep",
      ].includes(filename);
    },
  });
  // Create package - must be run from within the pre-pack directory
  // Use cd option to run command from prePackDir
  await cd(prePackDir);
  $.verbose = true;
  // NOTE(review): zx already quotes interpolated values; the extra quotes
  // around ${txzPath} are only harmless while the path contains no
  // characters zx would escape — confirm before adding spaces to deploy paths.
  await $`${join(startingDir, "scripts/makepkg")} -l y -c y "${txzPath}"`;
  $.verbose = false;
  await cd(startingDir);
  // Calculate hashes
  const sha256 = createHash("sha256")
    .update(await readFile(txzPath))
    .digest("hex");
  console.log(`TXZ SHA256: ${sha256}`);
  // Sanity check: explodepkg rejects a corrupt/invalid archive.
  try {
    await $`${join(startingDir, "scripts/explodepkg")} "${txzPath}"`;
  } catch (err) {
    console.error(`Error: invalid txz package created: ${txzPath}`);
    process.exit(1);
  }
  return { txzSha256: sha256, txzName };
};
/**
 * Generates an HTML-escaped changelog from git history via
 * conventional-changelog.
 *
 * @param apiVersion - Version string written into the changelog context.
 * @param pr - Optional PR number; when given, the commit range is limited to
 *   origin/main..HEAD and a PR-link header partial replaces the default
 *   version header.
 * @returns The escaped changelog text (or null), or exits the process with
 *   code 1 on failure.
 */
const getStagingChangelogFromGit = async (
  apiVersion: string,
  pr: string | null = null
): Promise<string | null> => {
  console.debug("Getting changelog from git" + (pr ? " for PR" : ""));
  try {
    // Positional args: options, context, git-raw-commits options,
    // parser options, writer options.
    const changelogStream = conventionalChangelog(
      {
        preset: "conventionalcommits",
      },
      {
        version: apiVersion,
      },
      pr
        ? {
            from: "origin/main",
            to: "HEAD",
          }
        : {},
      undefined,
      pr
        ? {
            headerPartial: `## [PR #${pr}](https://github.com/unraid/api/pull/${pr})\n\n`,
          }
        : undefined
    );
    // The stream yields the changelog in chunks; accumulate the full text.
    let changelog = "";
    for await (const chunk of changelogStream) {
      changelog += chunk;
    }
    // Encode HTML entities (via the html-sloppy-escaper import) so the text
    // can be embedded safely inside the .plg XML.
    return escapeHtml(changelog) ?? null;
  } catch (err) {
    console.error(`Error: failed to get changelog from git: ${err}`);
    process.exit(1);
  }
};
/**
 * Renders a .plg plugin file for the given release channel by rewriting the
 * XML entity values in the template at plugins/dynamix.unraid.net.plg.
 *
 * For "staging" and "pr" builds a changelog is generated from git and
 * substituted into the <CHANGES> section.
 *
 * @throws Error when a required entity value is empty (only PR may be empty).
 */
const buildPlugin = async ({
  type,
  txzSha256,
  txzName,
  version,
  pr = "",
  apiVersion,
  apiSha256,
}: {
  type: "staging" | "pr" | "production";
  txzSha256: string;
  txzName: string;
  version: string;
  pr?: string;
  apiVersion: string;
  apiSha256: string;
}) => {
  const rootPlgFile = join(startingDir, "/plugins/", `${pluginName}.plg`);
  // Output path: production keeps the bare name; other channels get a
  // ".staging" / ".pr" suffix.
  const newPluginFile = join(
    startingDir,
    "/deploy/release/plugins/",
    `${pluginName}${type === "production" ? "" : `.${type}`}.plg`
  );
  // Define URLs
  let PLUGIN_URL = "";
  let MAIN_TXZ = "";
  let API_TGZ = "";
  let RELEASE_NOTES: string | null = null;
  switch (type) {
    case "production":
      PLUGIN_URL = `${BASE_URLS.STABLE}/${pluginName}.plg`;
      MAIN_TXZ = `${BASE_URLS.STABLE}/${txzName}`;
      API_TGZ = `${BASE_URLS.STABLE}/unraid-api-${apiVersion}.tgz`;
      break;
    case "pr":
      PLUGIN_URL = `${BASE_URLS.PREVIEW}/pr/${pr}/${pluginName}.plg`;
      MAIN_TXZ = `${BASE_URLS.PREVIEW}/pr/${pr}/${txzName}`;
      API_TGZ = `${BASE_URLS.PREVIEW}/pr/${pr}/unraid-api-${apiVersion}.tgz`;
      RELEASE_NOTES = await getStagingChangelogFromGit(apiVersion, pr);
      break;
    case "staging":
      PLUGIN_URL = `${BASE_URLS.PREVIEW}/${pluginName}.plg`;
      MAIN_TXZ = `${BASE_URLS.PREVIEW}/${txzName}`;
      API_TGZ = `${BASE_URLS.PREVIEW}/unraid-api-${apiVersion}.tgz`;
      RELEASE_NOTES = await getStagingChangelogFromGit(apiVersion);
      break;
  }
  // Update plg file
  let plgContent = await readFile(rootPlgFile, "utf8");
  // Entity name -> value map written into the .plg template.
  const entities: Record<string, string> = {
    name: pluginName,
    env: type === "pr" ? "staging" : type,
    version: version,
    pluginURL: PLUGIN_URL,
    SHA256: txzSha256,
    MAIN_TXZ: MAIN_TXZ,
    API_TGZ: API_TGZ,
    PR: pr,
    API_version: apiVersion,
    API_SHA256: apiSha256,
  };
  // Every entity except PR must have a non-empty value.
  Object.entries(entities).forEach(([key, value]) => {
    if (key !== "PR" && !value) {
      throw new Error(`Entity ${key} not set in entities : ${value}`);
    }
    plgContent = updateEntityValue(plgContent, key, value);
  });
  if (RELEASE_NOTES) {
    // Update the CHANGES section with the release notes. A function replacer
    // is used so "$" sequences in the notes (e.g. "$&", which survives HTML
    // escaping as "$&amp;") are inserted literally instead of being
    // interpreted as String.replace replacement patterns.
    plgContent = plgContent.replace(
      /<CHANGES>.*?<\/CHANGES>/s,
      () => `<CHANGES>\n${RELEASE_NOTES}\n</CHANGES>`
    );
  }
  await writeFile(newPluginFile, plgContent);
  console.log(`${type} plugin: ${newPluginFile}`);
};
/**
 * Main build script: resets the deploy tree, builds the TXZ archive, then
 * renders the staging, optional PR, and production .plg files.
 */
const main = async () => {
  await createBuildDirectory();
  // Date-stamped plugin version, e.g. 2025.02.06.1232.
  const version = formatDate(new Date(), "yyyy.MM.dd.HHmm");
  console.log(`Version: ${version}`);
  const { txzSha256, txzName } = await buildTxz(version);
  const {
    API_VERSION: apiVersion,
    API_SHA256: apiSha256,
    PR: prNumber,
  } = validatedEnv;
  // Fields shared by every channel's plugin build.
  const shared = { txzSha256, txzName, version, apiVersion, apiSha256 };
  await buildPlugin({ type: "staging", ...shared });
  if (prNumber) {
    await buildPlugin({ type: "pr", pr: prNumber, ...shared });
  }
  await buildPlugin({ type: "production", ...shared });
};
await main();
+108
View File
@@ -0,0 +1,108 @@
#!/bin/bash
# Copyright 1994, 1998, 2000 Patrick Volkerding, Concord, CA, USA
# Copyright 2001, 2003 Slackware Linux, Inc., Concord, CA, USA
# Copyright 2007, 2009, 2017, 2018 Patrick Volkerding, Sebeka, MN, USA
# All rights reserved.
#
# Redistribution and use of this script, with or without modification, is
# permitted provided that the following conditions are met:
#
# 1. Redistributions of this script must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# With no arguments, print usage. NOTE(review): the script does not exit
# here — it falls through, and the main loop below simply runs zero times.
if [ $# = 0 ]; then
cat << EOF
Usage: explodepkg package_name [package_name2, ...]
Explodes a Slackware compatible software package
(or any tar+{gzip,bzip2,lz,xz archive) in the current directory.
Equivalent to (for each package listed):
( umask 000 ; cat package_name | COMPRESSOR -dc | tar xpvf package_name )
Note: This should only be used for debugging or examining packages, not for
installing them. It doesn't execute installation scripts or update the package
indexes in /var/lib/pkgtools/packages and /var/lib/pkgtools/scripts.
EOF
fi
# Set maximum number of threads to use. By default, this will be the number
# of CPU threads:
THREADS="$(nproc)"
# Main loop:
# NOTE(review): $* is unquoted, so package paths containing spaces get
# word-split; "$@" would be safer.
for PKG in $* ; do
echo "Exploding package $PKG in current directory:"
# Determine extension:
packageext="$( echo $PKG | rev | cut -f 1 -d . | rev)"
# Determine compression utility:
# NOTE(review): an unrecognized extension leaves $packagecompression unset,
# so the pipeline below then runs with an empty decompressor command.
case $packageext in
'tgz' )
packagecompression=gzip
;;
'gz' )
packagecompression=gzip
;;
'tbz' )
if which lbzip2 1> /dev/null 2> /dev/null ; then
packagecompression=lbzip2
else
packagecompression=bzip2
fi
;;
'bz2' )
if which lbzip2 1> /dev/null 2> /dev/null ; then
packagecompression=lbzip2
else
packagecompression=bzip2
fi
;;
'tlz' )
if which plzip 1> /dev/null 2> /dev/null ; then
packagecompression="plzip --threads=${THREADS}"
elif which lzip 1> /dev/null 2> /dev/null ; then
packagecompression=lzip
else
echo "ERROR: lzip compression utility not found in \$PATH."
exit 3
fi
;;
'lz' )
if which plzip 1> /dev/null 2> /dev/null ; then
packagecompression="plzip --threads=${THREADS}"
elif which lzip 1> /dev/null 2> /dev/null ; then
packagecompression=lzip
else
echo "ERROR: lzip compression utility not found in \$PATH."
exit 3
fi
;;
'lzma' )
packagecompression=lzma
;;
'txz' )
packagecompression="xz --threads=${THREADS}"
;;
'xz' )
packagecompression="xz --threads=${THREADS}"
;;
esac
# Extract in place, preserving permissions and extended attributes; tar
# diagnostics are suppressed.
( umask 000 ; cat $PKG | $packagecompression -dc | tar --xattrs --xattrs-include='*' --keep-directory-symlink -xpvf - 2> /dev/null )
if [ -r install/doinst.sh ]; then
echo
echo "An installation script was detected in ./install/doinst.sh, but"
echo "was not executed."
fi
done
+459
View File
@@ -0,0 +1,459 @@
#!/bin/bash
# Copyright 1994, 1998, 2008 Patrick Volkerding, Moorhead, Minnesota USA
# Copyright 2003 Slackware Linux, Inc. Concord, CA USA
# Copyright 2009, 2015, 2017, 2018, 2019 Patrick J. Volkerding, Sebeka, MN, USA
# All rights reserved.
#
# Redistribution and use of this script, with or without modification, is
# permitted provided that the following conditions are met:
#
# 1. Redistributions of this script must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Mon 2 Jul 15:32:14 UTC 2018
# Sort file lists and support SOURCE_DATE_EPOCH, for reproducibility.
#
# Mon May 21 18:31:20 UTC 2018
# Add --compress option, usually used to change the preset compression level
# or block size.
#
# Tue Feb 13 00:46:12 UTC 2018
# Use recent tar, and support storing POSIX ACLs and extended attributes.
#
# Tue Dec 12 21:55:59 UTC 2017
# If possible, use multiple compression threads.
#
# Wed Sep 23 18:36:43 UTC 2015
# Support spaces in file/directory names. <alphageek>
#
# Sun Apr 5 21:23:26 CDT 2009
# Support .tgz, .tbz, .tlz, and .txz packages. <volkerdi>
#
# Fri Nov 26 13:53:36 GMT 2004
# Patched to chmod 755 the package's root directory if needed, then restore
# previous permissions after the package has been created. <sw>
#
# Wed Mar 18 15:32:33 CST 1998
# Patched to avoid possible symlink attacks in /tmp.
CWD=$(pwd)
umask 022
# Reads a file of tab-separated "link-path<TAB>link-target" lines (as
# produced by the find/sed pipeline later in this script) and emits shell
# commands that remove each link path and recreate it with ln -sf. The
# output becomes the package's install/doinst.sh.
make_install_script() {
TAB="$(echo -e "\t")"
COUNT=1
# Walk the file one line at a time with sed; stop at the first empty line.
while :; do
LINE="$(sed -n "$COUNT p" $1)"
if [ "$LINE" = "" ]; then
break
fi
# Field 1 is the link path: split into directory and basename.
LINKGOESIN="$(echo "$LINE" | cut -f 1 -d "$TAB")"
LINKGOESIN="$(dirname "$LINKGOESIN")"
LINKNAMEIS="$(echo "$LINE" | cut -f 1 -d "$TAB")"
LINKNAMEIS="$(basename "$LINKNAMEIS")"
# Field 2 is the symlink target.
LINKPOINTSTO="$(echo "$LINE" | cut -f 2 -d "$TAB")"
echo "( cd $LINKGOESIN ; rm -rf $LINKNAMEIS )"
echo "( cd $LINKGOESIN ; ln -sf $LINKPOINTSTO $LINKNAMEIS )"
COUNT=$(expr $COUNT + 1)
done
}
# Print makepkg's help text describing all supported options.
usage() {
cat << EOF
Usage: makepkg package_name.tgz
(or: package_name.tbz, package_name.tlz, package_name.txz)
Makes a Slackware compatible package containing the contents of the current
and all subdirectories. If symbolic links exist, they will be removed and
an installation script will be made to recreate them later. This script will
be called "install/doinst.sh". You may add any of your own ash-compatible
shell scripts to this file and rebuild the package if you wish.
options: -l, --linkadd y|n (moves symlinks into doinst.sh: recommended)
-p, --prepend (prepend rather than append symlinks to an existing
doinst.sh. Useful to link libraries needed by programs in
the doinst.sh script)
-c, --chown y|n (resets all permissions to root:root 755 - not
generally recommended)
--threads <number> For xz/plzip compressed packages, set the max
number of threads to be used for compression. Only has an
effect on large packages. For plzip, the default is equal to
the number of CPU threads available on the machine. For xz,
the default is equal to 2 (due to commonly occuring memory
related failures when using many threads with multi-threaded
xz compression).
--compress <option> Supply a custom option to the compressor.
This will be used in place of the default, which is: -9
--acls Support storing POSIX ACLs in the package. The resulting
package will not be compatible with pkgtools version < 15.0.
--xattrs Support storing extended attributes in the package. The
resulting package will not be compatible with pkgtools
version < 15.0.
If these options are not set, makepkg will prompt if appropriate.
EOF
}
TMP=/tmp # This can be a hole, but I'm going to be careful about file
# creation in there, so don't panic. :^)
# Set maximum number of threads to use. By default, this will be the number
# of CPU threads:
THREADS="$(nproc)"
# Set default compression option.
COMPRESS_OPTION="-9"
# Parse options
unset ACLS XATTRS
# "[ 0 ]" is always true (a non-empty string); parsing ends via the final
# break once a non-option argument (the package name) is reached.
while [ 0 ]; do
if [ "$1" = "--linkadd" -o "$1" = "-l" ]; then
if [ "$2" = "y" ]; then
LINKADD=y
elif [ "$2" = "n" ]; then
LINKADD=n
else
usage
exit 2
fi
shift 2
elif [ "$1" = "--chown" -o "$1" = "-c" ]; then
if [ "$2" = "y" ]; then
CHOWN=y
elif [ "$2" = "n" ]; then
CHOWN=n
else
usage
exit 2
fi
shift 2
elif [ "$1" = "-p" -o "$1" = "--prepend" ]; then
PREPEND=y
shift 1
elif [ "$1" = "-threads" -o "$1" = "--threads" ]; then
THREADS="$2"
shift 2
# xz has memory issues with threads it seems, so we'll use two threads by
# default unless we see that something else was user-selected:
XZ_THREADS_FORCED=yes
elif [ "$1" = "-compress" -o "$1" = "--compress" ]; then
COMPRESS_OPTION="$2"
shift 2
elif [ "$1" = "--acls" ]; then
ACLS="--acls"
shift 1
elif [ "$1" = "--xattrs" ]; then
XATTRS="--xattrs"
shift 1
elif [ "$1" = "-h" -o "$1" = "-H" -o "$1" = "--help" -o $# = 0 ]; then
usage
exit 0
else
break
fi
done
unset MTIME
# Reproducible builds: clamp file mtimes to SOURCE_DATE_EPOCH when it is set.
if [ -n "${SOURCE_DATE_EPOCH}" ]; then
MTIME="--clamp-mtime --mtime=@${SOURCE_DATE_EPOCH}"
fi
PACKAGE_NAME="$1"
TARGET_NAME="$(dirname $PACKAGE_NAME)"
PACKAGE_NAME="$(basename $PACKAGE_NAME)"
# Identify package extension and compression type to use:
# NOTE(review): COMPEXT is set only in the .tgz branch and is not read
# anywhere else in this script.
if [ ! "$(basename $PACKAGE_NAME .tgz)" = "$PACKAGE_NAME" ]; then
EXTENSION="tgz"
COMPEXT="gz"
COMPRESSOR="gzip ${COMPRESS_OPTION} -cn"
if ! which gzip 1> /dev/null 2> /dev/null ; then
echo "ERROR: gzip compression utility not found in \$PATH."
exit 3
fi
elif [ ! "$(basename $PACKAGE_NAME .tar.gz)" = "$PACKAGE_NAME" ]; then
EXTENSION="tar.gz"
COMPRESSOR="gzip ${COMPRESS_OPTION} -cn"
if ! which gzip 1> /dev/null 2> /dev/null ; then
echo "ERROR: gzip compression utility not found in \$PATH."
exit 3
fi
elif [ ! "$(basename $PACKAGE_NAME .tbz)" = "$PACKAGE_NAME" ]; then
EXTENSION="tbz"
if which lbzip2 1> /dev/null 2> /dev/null ; then
COMPRESSOR="lbzip2 ${COMPRESS_OPTION} -c"
else
if which bzip2 1> /dev/null 2> /dev/null ; then
COMPRESSOR="bzip2 ${COMPRESS_OPTION} -c"
else
echo "ERROR: bzip2 compression utility not found in \$PATH."
exit 3
fi
fi
elif [ ! "$(basename $PACKAGE_NAME .tar.bz2)" = "$PACKAGE_NAME" ]; then
EXTENSION="tar.bz2"
if which lbzip2 1> /dev/null 2> /dev/null ; then
COMPRESSOR="lbzip2 ${COMPRESS_OPTION} -c"
else
if which bzip2 1> /dev/null 2> /dev/null ; then
COMPRESSOR="bzip2 ${COMPRESS_OPTION} -c"
else
echo "ERROR: bzip2 compression utility not found in \$PATH."
exit 3
fi
fi
elif [ ! "$(basename $PACKAGE_NAME .tlz)" = "$PACKAGE_NAME" ]; then
EXTENSION="tlz"
if which plzip 1> /dev/null 2> /dev/null ; then
COMPRESSOR="plzip ${COMPRESS_OPTION} --threads=${THREADS} -c"
else
echo "WARNING: plzip compression utility not found in \$PATH."
echo "WARNING: package will not support multithreaded decompression."
if which lzip 1> /dev/null 2> /dev/null ; then
COMPRESSOR="lzip ${COMPRESS_OPTION} -c"
else
echo "ERROR: lzip compression utility not found in \$PATH."
exit 3
fi
fi
elif [ ! "$(basename $PACKAGE_NAME .tar.lz)" = "$PACKAGE_NAME" ]; then
EXTENSION="tar.lz"
if which plzip 1> /dev/null 2> /dev/null ; then
COMPRESSOR="plzip ${COMPRESS_OPTION} --threads=${THREADS} -c"
else
echo "WARNING: plzip compression utility not found in \$PATH."
echo "WARNING: package will not support multithreaded decompression."
if which lzip 1> /dev/null 2> /dev/null ; then
COMPRESSOR="lzip ${COMPRESS_OPTION} -c"
else
echo "ERROR: lzip compression utility not found in \$PATH."
exit 3
fi
fi
elif [ ! "$(basename $PACKAGE_NAME .tar.lzma)" = "$PACKAGE_NAME" ]; then
EXTENSION="tar.lzma"
COMPRESSOR="lzma ${COMPRESS_OPTION} -c"
if ! which lzma 1> /dev/null 2> /dev/null ; then
echo "ERROR: lzma compression utility not found in \$PATH."
exit 3
fi
elif [ ! "$(basename $PACKAGE_NAME .txz)" = "$PACKAGE_NAME" ]; then
EXTENSION="txz"
if [ ! "$XZ_THREADS_FORCED" = "yes" ]; then
# Two threads by default with xz due to memory failures on 32-bit. Not that
# it matters much... if upstream ever gets around to implementing multi-
# threaded decompression we'll revisit this default. :-D
COMPRESSOR="xz ${COMPRESS_OPTION} --threads=2 -c"
else
COMPRESSOR="xz ${COMPRESS_OPTION} --threads=${THREADS} -c"
fi
if ! which xz 1> /dev/null 2> /dev/null ; then
echo "ERROR: xz compression utility not found in \$PATH."
exit 3
fi
elif [ ! "$(basename $PACKAGE_NAME .tar.xz)" = "$PACKAGE_NAME" ]; then
EXTENSION="tar.xz"
if [ ! "$XZ_THREADS_FORCED" = "yes" ]; then
# Two threads by default with xz due to memory failures on 32-bit. Not that
# it matters much... if upstream ever gets around to implementing multi-
# threaded decompression we'll revisit this default. :-D
COMPRESSOR="xz ${COMPRESS_OPTION} --threads=2 -c"
else
COMPRESSOR="xz ${COMPRESS_OPTION} --threads=${THREADS} -c"
fi
if ! which xz 1> /dev/null 2> /dev/null ; then
echo "ERROR: xz compression utility not found in \$PATH."
exit 3
fi
else
EXTENSION="$(echo $PACKAGE_NAME | rev | cut -f 1 -d . | rev)"
echo "ERROR: Package extension .$EXTENSION is not supported."
exit 1
fi
TAR_NAME="$(basename $PACKAGE_NAME .$EXTENSION)"
# Sanity check -- we can't make the package in the current directory:
if [ "$CWD" = "$TARGET_NAME" -o "." = "$TARGET_NAME" ]; then
echo "ERROR: Can't make output package in current directory."
exit 2
fi
echo
echo "Slackware package maker, version 3.14159265."
echo
echo "Searching for symbolic links:"
# Get rid of possible pre-existing trouble:
INST=$(mktemp $TMP/makepkg.XXXXXX)
# Escape some characters in symlink names:
# The list written to $INST is "link-path<TAB>target", sorted, with
# shell-special characters backslash-escaped.
find . -type l -printf "%p\t%l\n" | LC_COLLATE=C sort | sed 's,^\./,,; s,[ "#$&\x27()*;<>?[\\`{|~],\\&,g;' | tee $INST
if [ ! "$(cat $INST)" = "" ]; then
echo
echo "Making symbolic link creation script:"
make_install_script $INST | tee doinst.sh
fi
echo
# Interactively (or via -l) fold the generated symlink commands into
# install/doinst.sh and remove the real symlinks from the tree.
if [ ! "$(cat $INST)" = "" ]; then
if [ -r install/doinst.sh ]; then
echo "Unless your existing installation script already contains the code"
echo "to create these links, you should append these lines to your existing"
echo "install script. Now's your chance. :^)"
echo
echo "Would you like to add this stuff to the existing install script and"
echo -n "remove the symbolic links ([y]es, [n]o)? "
else
echo "It is recommended that you make these lines your new installation script."
echo
echo "Would you like to make this stuff the install script for this package"
echo -n "and remove the symbolic links ([y]es, [n]o)? "
fi
if [ ! "$LINKADD" ]; then
read LINKADD;
echo
else
echo $LINKADD
echo
fi
if [ "$LINKADD" = "y" ]; then
if [ -r install/doinst.sh ]; then
UPDATE="t"
if [ "$PREPEND" = "y" ]; then
touch install/doinst.sh
mv install/doinst.sh install/doinst.sh.shipped
cat doinst.sh > install/doinst.sh
echo "" >> install/doinst.sh
cat install/doinst.sh.shipped >> install/doinst.sh
rm -f install/doinst.sh.shipped
else
cat doinst.sh >> install/doinst.sh
fi
else
mkdir -p install
cat doinst.sh > install/doinst.sh
fi
echo
echo "Removing symbolic links:"
find . -type l -exec rm -v {} \;
echo
if [ "$UPDATE" = "t" ]; then
if [ "$PREPEND" = "y" ]; then
echo "Updating your ./install/doinst.sh (prepending symlinks)..."
else
echo "Updating your ./install/doinst.sh..."
fi
else
echo "Creating your new ./install/doinst.sh..."
fi
fi
else
echo "No symbolic links were found, so we won't make an installation script."
echo "You can make your own later in ./install/doinst.sh and rebuild the"
echo "package if you like."
fi
rm -f doinst.sh $INST
echo
echo "This next step is optional - you can set the directories in your package"
echo "to some sane permissions. If any of the directories in your package have"
echo "special permissions, then DO NOT reset them here!"
echo
echo "Would you like to reset all directory permissions to 755 (drwxr-xr-x) and"
echo -n "directory ownerships to root.root ([y]es, [n]o)? "
if [ ! "$CHOWN" ]; then
read CHOWN;
echo
else
echo $CHOWN
echo
fi
if [ "$CHOWN" = "y" ]; then
find . -type d -exec chmod -v 755 {} \;
find . -type d -exec chown -v root.root {} \;
fi
# Ensure that the 'root' of the package is chmod 755 because
# the / of your filesystem will inherit these permissions.
# If it's anything tighter than 755 then bad things happen such as users
# not being able to login, users already logged in can no longer run commands
# and so on.
OLDROOTPERMS="$(find -name . -printf "%m\n")"
if [ $OLDROOTPERMS -ne 755 ]; then
echo "WARNING: $PWD is chmod $OLDROOTPERMS"
echo " temporarily changing to chmod 755"
chmod 755 .
fi
echo "Creating Slackware package: ${TARGET_NAME}/${TAR_NAME}.${EXTENSION}"
echo
rm -f ${TARGET_NAME}/${TAR_NAME}.${EXTENSION}
# HISTORICAL NOTE 2/2018:
# In the interest of maximizing portability of this script, we'll use find
# and sed to create a filelist compatible with tar-1.13, and then use a
# more modern tar version to create the archive.
#
# Other (but possibly less portable) ways to achieve the same result:
#
# Use the tar --transform and --show-transformed-names options:
# tar --transform "s,^\./\(.\),\1," --show-transformed-names $ACLS $XATTRS -cvf - . | $COMPRESSOR > ${TARGET_NAME}/${TAR_NAME}.${EXTENSION}
#
# Use cpio:
# find ./ | sed '2,$s,^\./,,' | cpio --quiet -ovHustar > ${TARGET_NAME}/${TAR_NAME}.tar
# Create the package:
find ./ | LC_COLLATE=C sort | sed '2,$s,^\./,,' | tar --no-recursion $ACLS $XATTRS $MTIME -T - -cvf - | $COMPRESSOR > ${TARGET_NAME}/${TAR_NAME}.${EXTENSION}
# NOTE(review): $? here reflects only the last pipeline stage ($COMPRESSOR);
# a find/sort/tar failure can go undetected without `set -o pipefail`.
ERRCODE=$?
if [ ! $ERRCODE = 0 ]; then
echo "ERROR: $COMPRESSOR returned error code $ERRCODE -- makepkg failed."
exit 1
fi
# Warn of zero-length files:
find . -type f -size 0c | cut -b3- | sed "s/^/WARNING: zero length file /g"
# Warn of corrupt or empty gzip files:
find . -type f -name '*.gz' | while read file ; do
if ! gzip -t $file 1> /dev/null 2> /dev/null ; then
echo "WARNING: gzip test failed on $(echo $file | cut -b3-)"
else
if [ "$(gzip -l $file | tail -n 1 | tr -s ' ' | cut -f 3 -d ' ')" -eq 0 ]; then
echo "WARNING: $(echo $file | cut -b3-) is an empty gzipped file"
fi
fi
done
# Some more handy warnings:
if [ -d usr/share/man ]; then
echo "WARNING: /usr/share/man (with possibly not gzipped man pages) detected"
fi
if [ -d usr/share/info ]; then
echo "WARNING: /usr/share/info (with possibly not gzipped info pages) detected"
fi
if find . | grep site_perl 1> /dev/null ; then
echo "WARNING: site_perl directory detected (this is fine for a local package build)"
fi
# Restore the old permissions if they previously weren't chmod 755
if [ $OLDROOTPERMS -ne 755 ]; then
echo
echo "Restoring permissions of $PWD to chmod $OLDROOTPERMS"
chmod $OLDROOTPERMS .
fi
echo
echo "Slackware package ${TARGET_NAME}/${TAR_NAME}.${EXTENSION} created."
echo
+103
View File
@@ -0,0 +1,103 @@
#!/bin/bash
# LEGACY SCRIPT - Kept for validation purposes. If still present after May 2025, delete.
# passes `shellcheck` and `shfmt -i 2`
# Select the release environment from the first argument: s=staging, p=production.
[[ "$1" == "s" ]] && env=staging
[[ "$1" == "p" ]] && env=production
[[ -z "${env}" ]] && echo "usage: [s|p]" && exit 1
# If we have a second parameter, it's the PR number (for Pull request builds)
[[ -n "$2" ]] && PR="$2" || PR=""
DIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
MAINDIR=$(dirname "$(dirname "${DIR}")")
tmpdir=$(mktemp -d)
pluginSrc=$(basename "${DIR}")
plugin="${pluginSrc}"
version=$(date +"%Y.%m.%d.%H%M")
plgfile="${MAINDIR}/plugins/${plugin}.plg"
txzfile="${MAINDIR}/archive/${plugin}-${version}.txz"
# create txz package
mkdir -p "$(dirname "${txzfile}")"
mkdir -p "${tmpdir}"
# shellcheck disable=SC2046
cp --parents -f $(find . -type f ! \( -iname ".DS_Store" -o -iname "pkg_build.sh" -o -iname "makepkg" -o -iname "explodepkg" -o -iname "sftp-config.json" \)) "${tmpdir}/"
cd "${tmpdir}" || exit 1
chmod 0755 -R .
sudo chown root:root -R .
sudo "${MAINDIR}/source/dynamix.unraid.net/makepkg" -l y -c y "${txzfile}"
sudo rm -rf "${tmpdir}"
md5=$(md5sum "${txzfile}" | cut -f 1 -d ' ')
echo "MD5: ${md5}"
sha256=$(sha256sum "${txzfile}" | cut -f 1 -d ' ')
echo "SHA256: ${sha256}"
# test txz package
mkdir -p "${tmpdir}"
cd "${tmpdir}" || exit 1
RET=$(sudo "${MAINDIR}/source/dynamix.unraid.net/explodepkg" "${txzfile}" 2>&1 >/dev/null)
sudo rm -rf "${tmpdir}"
[[ "${RET}" != "" ]] && echo "Error: invalid txz package created: ${txzfile}" && exit 1
cd "${DIR}" || exit 1
# define vars for plg
# NOTE(review): API_VERSION and API_SHA256 are read from the caller's
# environment and are never validated in this script — unset values produce
# broken URLs and entity values below.
PLUGIN_URL="https://stable.dl.unraid.net/unraid-api/\&name;.plg"
MAIN_TXZ="https://stable.dl.unraid.net/unraid-api/${plugin}-${version}.txz"
API_TGZ="https://stable.dl.unraid.net/unraid-api/unraid-api-${API_VERSION}.tgz"
# Check if PR is set, use a different path if so
if [[ -n "${PR}" ]]; then
MAIN_TXZ="https://preview.dl.unraid.net/unraid-api/pr/${PR}/${plugin}-${version}.txz"
API_TGZ="https://preview.dl.unraid.net/unraid-api/pr/${PR}/unraid-api-${API_VERSION}.tgz"
PLUGIN_URL="https://preview.dl.unraid.net/unraid-api/pr/${PR}/${plugin}.plg"
elif [[ "${env}" == "staging" ]]; then
PLUGIN_URL="https://preview.dl.unraid.net/unraid-api/\&name;.plg"
MAIN_TXZ="https://preview.dl.unraid.net/unraid-api/${plugin}-${version}.txz"
API_TGZ="https://preview.dl.unraid.net/unraid-api/unraid-api-${API_VERSION}.tgz"
fi
# update plg file
sed -i -E "s#(ENTITY name\s*)\".*\"#\1\"${plugin}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY env\s*)\".*\"#\1\"${env}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY version\s*)\".*\"#\1\"${version}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY pluginURL\s*)\".*\"#\1\"${PLUGIN_URL}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY SHA256\s*)\".*\"#\1\"${sha256}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY MAIN_TXZ\s*)\".*\"#\1\"${MAIN_TXZ}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY API_TGZ\s*)\".*\"#\1\"${API_TGZ}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY PR\s*)\".*\"#\1\"${PR}\"#g" "${plgfile}"
# set from environment vars
sed -i -E "s#(ENTITY API_version\s*)\".*\"#\1\"${API_VERSION}\"#g" "${plgfile}"
sed -i -E "s#(ENTITY API_SHA256\s*)\".*\"#\1\"${API_SHA256}\"#g" "${plgfile}"
# validate that all ENTITY values are present
# NOTE(review): this list requires NODEJS_FILENAME/NODEJS_SHA256/NODEJS_TXZ
# entities that no sed command above sets — validation passes only if they
# already exist in the .plg template.
required_entities=("name" "env" "version" "pluginURL" "SHA256" "MAIN_TXZ" "API_TGZ" "NODEJS_FILENAME" "NODEJS_SHA256" "NODEJS_TXZ" "API_version" "API_SHA256")
validation_failed=false
for entity in "${required_entities[@]}"; do
entity_value=$(grep -oP "ENTITY ${entity} \"\K[^\"]*" "${plgfile}" || echo "")
if [[ -z "${entity_value}" ]]; then
echo "Error: ENTITY ${entity} was not replaced correctly in ${plgfile}"
validation_failed=true
elif [[ "${entity_value}" =~ ^[[:space:]]*$ ]]; then
echo "Error: ENTITY ${entity} has an empty value in ${plgfile}"
validation_failed=true
fi
done
if [[ "${validation_failed}" == "true" ]]; then
# NOTE(review): "${plgfile}.bak" is restored here but this script never
# creates that backup — the sed -i calls above run without a backup suffix.
if [[ -f "${plgfile}.bak" ]]; then
echo "Restoring backup due to validation failure"
mv "${plgfile}.bak" "${plgfile}"
fi
exit 1
fi
# add changelog for major versions
# sed -i "/<CHANGES>/a ###${version}\n" ${plgfile}
echo
grep -E "ENTITY (name|PLUGIN_URL|env|version|MD5|SHA256|node_api_version|MAIN_TXZ|API_TGZ)" "${plgfile}"
echo
echo "${env} plugin: ${plgfile}"
echo "${env} txz: ${txzfile}"