mirror of https://github.com/unraid/api.git — synced 2026-01-02 14:40:01 -06:00

Compare commits: release-pl...feat/notif (2 commits)
| Author | SHA1 | Date |
|---|---|---|
| | d0db8a098d | |
| | 94dfe85716 | |
.github/CODEOWNERS (vendored) — 1 line removed

@@ -1 +0,0 @@
@elibosley @pujitm @mdatelle @zackspear
.github/ISSUE_TEMPLATE/bug_report.md (vendored) — 45 lines removed

@@ -1,45 +0,0 @@
---
name: Bug Report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---

<!--
IMPORTANT: If your issue is related to Unraid Connect features (Flash Backup, connect.myunraid.net, mothership errors with connectivity, etc.) please submit a ticket here: [LINK TO FRESHDESK FORM FOR CONNECT] and choose Unraid Connect in the dropdown.
-->

## Environment

**Unraid OS Version:**
<!-- Please specify your Unraid version (e.g. 7.0.0) -->

**Are you using a reverse proxy?**
<!-- Please answer Yes/No. If yes, have you tested the issue by accessing your server directly? -->
<!-- Note: Reverse proxies are not officially supported by Unraid and can cause issues with various components of Unraid OS -->

## Pre-submission Checklist
<!-- Please check all that apply by replacing [ ] with [x] -->

- [ ] I have verified that my Unraid OS is up to date
- [ ] I have tested this issue by accessing my server directly (not through a reverse proxy)
- [ ] This is not an Unraid Connect related issue (if it is, please submit via the support form instead)

## Issue Description
<!-- Please provide a clear and concise description of the issue -->

## Steps to Reproduce

1.
2.
3.

## Expected Behavior
<!-- What did you expect to happen? -->

## Actual Behavior
<!-- What actually happened? -->

## Additional Context
<!-- Add any other context, screenshots, or error messages about the problem here -->
.github/ISSUE_TEMPLATE/feature_request.md (vendored) — 34 lines removed

@@ -1,34 +0,0 @@
---
name: Feature Request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---

<!--
IMPORTANT: If your feature request is related to Unraid Connect features (Flash Backup, connect.myunraid.net, etc.) please submit it here: [LINK TO FRESHDESK FORM FOR CONNECT] and choose Unraid Connect in the dropdown.
-->

## Is your feature request related to a problem?
<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -->

## Describe the solution you'd like
<!-- A clear and concise description of what you want to happen -->

## Describe alternatives you've considered
<!-- A clear and concise description of any alternative solutions or features you've considered -->

## Additional context
<!-- Add any other context, mockups, or screenshots about the feature request here -->

## Environment (if relevant)
**Unraid OS Version:**
<!-- Please specify your Unraid version (e.g. 7.0.0) if the feature request is version-specific -->

## Pre-submission Checklist
<!-- Please check all that apply by replacing [ ] with [x] -->

- [ ] I have searched existing issues to ensure this feature hasn't already been requested
- [ ] This is not an Unraid Connect related feature (if it is, please submit via the support form instead)
- [ ] I have provided clear examples or use cases for the feature
.github/ISSUE_TEMPLATE/work_intent.md (vendored) — 41 lines removed

@@ -1,41 +0,0 @@
---
name: Work Intent
about: Request approval for planned development work (must be approved before starting)
title: 'Work Intent: '
labels: work-intent, unapproved
assignees: ''
---

<!--
IMPORTANT: This work intent must be approved by a core developer before beginning any development work.
The 'unapproved' label will be removed once approved.
-->

## Overview
<!-- Provide a high-level description of what you want to work on and why -->

## Technical Approach
<!-- Brief description of how you plan to implement this -->

## Scope
<!-- Check components that will be modified -->
- [ ] API
- [ ] Plugin
- [ ] Web UI
- [ ] Build/Deploy Process
- [ ] Documentation

## Timeline & Impact
<!-- Quick details about timing and effects -->
- Estimated time needed:
- Potential impacts:

## Pre-submission Checklist
<!-- Please check all that apply -->
- [ ] I have searched for similar work/issues
- [ ] I understand this needs approval before starting
- [ ] I am willing to make adjustments based on feedback

<!--
For Reviewers: Remove 'unapproved' label and add 'approved' label if accepted
-->
.github/unraid.svg (vendored) — 1 line removed (previous image: 915 B)

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" version="1.2" viewBox="0 0 1000 1000"><defs><linearGradient id="a" x1="-900" x2="-100" y1="-100" y2="-900" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#e32929"/><stop offset="1" stop-color="#ff8d30"/></linearGradient></defs><path fill="url(#a)" d="M1000 500.1v376.4c0 57.5-43.4 110.1-100 120.9-8.4 1.6-17.1 2.5-25.6 2.5-250.1.1-500.2.1-750.3.1-61.3 0-114.8-47-122.8-108q-.3-2.1-.6-4.1-.2-2-.3-4.1-.2-2-.3-4v-4.1C0 624.9 0 374.2 0 123.5 0 66 43.4 13.3 100 2.6 108.4 1 117.1.1 125.6.1 375.9 0 626.2 0 876.5 0 934 0 986.7 43.4 997.4 100c1.5 8.4 2.5 17.1 2.5 25.6.1 124.8.1 249.7.1 374.5z"/><path fill="#fff" d="M481.6 392.1h36.5v216.2h-36.5zm-356 0h36.5v216.2h-36.5zm178 242h36.5v82.5h-36.5zm-89.3-92.7h36.5v133.7h-36.5zm178 0h36.5V675h-36.5zm445.8-149.3h36.5v216.1h-36.5zm-178-107.8h36.5v82.6h-36.5zm89.3 41.5h36.5v133.1h-36.5zm-178.6 0h36.5v133h-36.5z"/></svg>
.github/workflows/create-docusaurus-pr.yml (vendored) — 59 lines removed

@@ -1,59 +0,0 @@
name: Update API Documentation
on:
  push:
    branches:
      - main
    paths:
      - 'api/docs/**'
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

# Add permissions for GITHUB_TOKEN
permissions:
  contents: write
  pull-requests: write
jobs:
  create-docs-pr:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout source repository
        uses: actions/checkout@v4
        with:
          path: source-repo

      - name: Checkout docs repository
        uses: actions/checkout@v4
        with:
          repository: unraid/docs
          path: docs-repo
          token: ${{ secrets.DOCS_PAT_UNRAID_BOT }}

      - name: Copy updated docs
        run: |
          if [ ! -d "source-repo/api/docs" ]; then
            echo "Source directory does not exist!"
            exit 1
          fi
          rm -rf docs-repo/docs/API/
          mkdir -p docs-repo/docs/API
          cp -r source-repo/api/docs/public/. docs-repo/docs/API/
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.DOCS_PAT_UNRAID_BOT }}
          path: docs-repo
          commit-message: 'docs: update API documentation'
          title: 'Update API Documentation'
          body: |
            This PR updates the API documentation based on changes from the main repository.

            Changes were automatically generated from api/docs/* directory.

            @coderabbitai ignore
          reviewers: ljm42, elibosley, pujitm, mdatelle
          branch: update-api-docs
          base: main
          delete-branch: true
.github/workflows/main.yml (vendored) — 454 changes
@@ -5,6 +2,8 @@ on:
  push:
    branches:
      - main
    tags:
      - "v*"

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
@@ -12,8 +14,7 @@ concurrency:

jobs:
  release-please:
    # Only run release-please on pushes to main
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
    if: startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    permissions:
      contents: write
@@ -24,120 +25,75 @@ jobs:
    outputs:
      releases_created: ${{ steps.release.outputs.releases_created }}
      tag_name: ${{ steps.release.outputs.tag_name }}
  test-api:
    defaults:
      run:
        working-directory: api
  start:
    # This prevents a tag running twice as it'll have a "tag" and a "commit" event
    # We only want the tag to run the action as it'll be able to create the release notes
    if: (startsWith(github.event.ref, 'refs/heads/') && !startsWith(github.event.head_commit.message, 'chore(release)')) || (startsWith(github.event.ref, 'refs/tags/') && startsWith(github.event.head_commit.message, 'chore(release)'))
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Validate branch and tag
        run: exit 0

      - name: Install Node
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: PNPM Install
        run: pnpm install --frozen-lockfile

      - name: Lint
        run: pnpm run lint

      - name: Test
        run: pnpm run coverage

  build-api:
  build-test-api:
    name: Build and Test API
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: api
    outputs:
      API_VERSION: ${{ steps.vars.outputs.API_VERSION }}
      API_MD5: ${{ steps.set-hashes.outputs.API_MD5 }}
      API_SHA256: ${{ steps.set-hashes.outputs.API_SHA256 }}
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Install Node
        uses: actions/setup-node@v4
      - name: Build with Buildx
        uses: docker/setup-buildx-action@v3
        with:
          node-version-file: ".nvmrc"

      - uses: pnpm/action-setup@v4
        name: Install pnpm
          install: true
          platforms: linux/amd64
      - name: Build Builder
        uses: docker/build-push-action@v6
        with:
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile

      - name: Lint
        run: pnpm run lint

      - name: Type Check
        run: pnpm run type-check
          context: ./api
          push: false
          tags: builder:latest
          cache-from: type=gha,ref=builder:latest
          cache-to: type=gha,mode=max,ref=builder:latest
          load: true
      - name: Lint inside of the docker container
        continue-on-error: true
        run: |
          docker run --rm builder npm run lint

      - name: Build
        run: pnpm run build

      - name: Test inside of the docker container
        run: |
          git fetch --depth=2 origin main
          if git diff --name-only --relative=api origin/main HEAD | grep -q '.'; then
            docker run --rm builder npm run coverage
          else
            echo "No changes in /api folder, skipping coverage."
          fi
      - name: Get Git Short Sha and API version
        id: vars
        run: |
          GIT_SHA=$(git rev-parse --short HEAD)
          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
          export API_VERSION
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package-lock.json)
          echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT
          echo "IS_TAGGED=$IS_TAGGED" >> $GITHUB_OUTPUT
          echo "PACKAGE_LOCK_VERSION=$PACKAGE_LOCK_VERSION" >> $GITHUB_OUTPUT
          echo "API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")" >> $GITHUB_OUTPUT
      - name: Build inside of the docker container
        id: build-pack-binary
        run: |
          docker run --rm -v ${{ github.workspace }}/api/deploy/release:/app/deploy/release -e API_VERSION=${{ steps.vars.outputs.API_VERSION }} builder npm run build-and-pack

      - name: Build
        run: pnpm run build-and-pack
      - name: Set Hashes
        id: set-hashes
        run: |
          echo "API_MD5=$(md5sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')" >> $GITHUB_OUTPUT
          echo "API_SHA256=$(sha256sum ${{ github.workspace }}/api/deploy/release/*.tgz | awk '{ print $1 }')" >> $GITHUB_OUTPUT

      - name: Upload tgz to Github artifacts
        uses: actions/upload-artifact@v4
@@ -145,62 +101,7 @@ jobs:
          name: unraid-api
          path: ${{ github.workspace }}/api/deploy/release/*.tgz

  build-unraid-ui-webcomponents:
    name: Build Unraid UI Library (Webcomponent Version)
    defaults:
      run:
        working-directory: unraid-ui
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Install Node
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/ui

      - name: Build
        run: pnpm run build:wc

      - name: Upload Artifact to Github
        uses: actions/upload-artifact@v4
        with:
          name: unraid-wc-ui
          path: unraid-ui/dist/

  build-web:
    # needs: [build-unraid-ui]
    name: Build Web App
    environment:
      name: production
@@ -221,59 +122,34 @@ jobs:
          echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
          cat .env

      - name: Install Node
      - name: Install node
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"
          cache: "npm"
          cache-dependency-path: "web/package-lock.json"
          node-version-file: "web/.nvmrc"

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false
      - name: Installing node deps
        run: npm install

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: PNPM Install
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/web --filter @unraid/ui

      - name: Build Unraid UI
        run: |
          cd ${{ github.workspace }}/unraid-ui
          pnpm run build
      - name: Lint files
        continue-on-error: true
        run: pnpm run lint
        run: npm run lint

      - name: Test
        run: pnpm run test:ci
        run: npm run test:ci

      - name: Build
        run: pnpm run build
        run: npm run build

      - name: Upload build to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-wc-rich
          name: unraid-web
          path: web/.nuxt/nuxt-custom-elements/dist/unraid-components

  build-plugin:
    needs: [build-api, build-web, build-unraid-ui-webcomponents]
    outputs:
      tag: ${{ steps.build-plugin.outputs.tag }}
    needs: [build-test-api, build-web]
    defaults:
      run:
        working-directory: plugin
@@ -285,78 +161,80 @@ jobs:
          timezoneLinux: "America/Los_Angeles"
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install Node
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/connect-plugin

      - name: Download Unraid Web Components
      - name: Download unraid web components
        uses: actions/download-artifact@v4
        with:
          pattern: unraid-wc-*
          name: unraid-web
          path: ./plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components
          merge-multiple: true
      - name: Download Unraid API
        uses: actions/download-artifact@v4
        with:
          name: unraid-api
          path: /tmp/unraid-api/
      - name: Extract Unraid API and Build Plugin
        id: build-plugin
      - name: Download Node.js From Slackbuilds (skipped due to node.js issues)
        if: false
        id: download-nodejs
        run: |
          tar -xzf /tmp/unraid-api/unraid-api.tgz -C ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api
          cd ${{ github.workspace }}/plugin

          if [ -n "${{ github.event.pull_request.number }}" ]; then
            export TAG=PR${{ github.event.pull_request.number }}
            # Put tag into github env
            echo "TAG=${TAG}" >> $GITHUB_OUTPUT
          # Get latest node version (based on main_node_version) from slackware
          main_node_version=$(sed 's/^v//' ../api/.nvmrc)
          base_node_url="https://mirrors.slackware.com/slackware/slackware64-current/slackware64/l/"
          latest_nodejs=$(wget -q -O- "${base_node_url}" | grep -o "nodejs-${main_node_version}\.[0-9.]*-x86_64-[0-9]*\.txz" | sort -V | tail -n 1)
          if [[ -z "${latest_nodejs}" ]]; then
            echo "Error: Failed to fetch the latest nodejs version."
            exit 1
          fi
          node_download_url="${base_node_url}${latest_nodejs}"
          if ! wget -q "${node_download_url}" -O "${{ github.workspace }}/plugin/archive/${latest_nodejs}"; then
            echo "Error: Failed to download nodejs package."
            exit 1
          fi
          node_sha256=$(sha256sum "${{ github.workspace }}/plugin/archive/${latest_nodejs}" | cut -f 1 -d ' ')
          echo "NODEJS_FILENAME=${latest_nodejs}" >> $GITHUB_OUTPUT
          echo "NODEJS_SHA256=${node_sha256}" >> $GITHUB_OUTPUT
      - name: Download nghttp3
        id: download-nghttp3
        run: |
          # Get latest nghttp3 version
          base_nghttp3_url="https://mirrors.slackware.com/slackware/slackware64-current/slackware64/n/"
          latest_nghttp3=$(wget -q -O- "${base_nghttp3_url}" | grep -o "nghttp3-[0-9.]*-x86_64-[0-9]*\.txz" | sort -V | tail -n 1)
          nghttp3_download_url="${base_nghttp3_url}${latest_nghttp3}"
          if ! wget -q "${nghttp3_download_url}" -O "${{ github.workspace }}/plugin/archive/${latest_nghttp3}"; then
            echo "Error: Failed to download nghttp3 package."
            exit 1
          fi
          nghttp3_sha256=$(sha256sum "${{ github.workspace }}/plugin/archive/${latest_nghttp3}" | cut -f 1 -d ' ')
          echo "NGHTTP3_FILENAME=${latest_nghttp3}" >> $GITHUB_OUTPUT
          echo "NGHTTP3_SHA256=${nghttp3_sha256}" >> $GITHUB_OUTPUT
      - name: Build Plugin
        run: |
          cd source/dynamix.unraid.net
          export API_VERSION=${{needs.build-test-api.outputs.API_VERSION}}
          export API_MD5=${{needs.build-test-api.outputs.API_MD5}}
          export API_SHA256=${{needs.build-test-api.outputs.API_SHA256}}
          export NGHTTP3_FILENAME=${{ steps.download-nghttp3.outputs.NGHTTP3_FILENAME }}
          export NGHTTP3_SHA256=${{ steps.download-nghttp3.outputs.NGHTTP3_SHA256 }}
          if [ -z "${API_VERSION}" ] ||
             [ -z "${API_MD5}" ] ||
             [ -z "${API_SHA256}" ] ||
             [ -z "${NGHTTP3_FILENAME}" ] ||
             [ -z "${NGHTTP3_SHA256}" ]; then
            echo "Error: One or more required variables are not set."
            exit 1
          fi

          pnpm run build
          bash ./pkg_build.sh s ${{github.event.pull_request.number}}
          bash ./pkg_build.sh p
      - name: Upload binary txz and plg to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: connect-files
          path: |
            plugin/deploy/release/plugins/
            plugin/deploy/release/archive/*.txz
            ${{ github.workspace }}/plugin/archive/*.txz
            ${{ github.workspace }}/plugin/plugins/*.plg
          retention-days: 5
          if-no-files-found: error

  release-pull-request:
    if: |
      github.event_name == 'pull_request'
      github.event_name == 'pull_request' &&
      github.event.pull_request.base.ref == 'main'
    runs-on: ubuntu-latest
    needs: [test-api, build-plugin]
    needs: [build-plugin]
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
@@ -364,15 +242,35 @@ jobs:
      - name: Make PR Release Folder
        run: mkdir pr-release/

      - name: Download unraid-api binary tgz
        uses: actions/download-artifact@v4
        with:
          name: unraid-api
          path: pr-release

      - name: Download plugin binary tgz
        uses: actions/download-artifact@v4
        with:
          name: connect-files

      - name: Write Changelog to Plugin XML
        run: |
          # Capture the pull request number and latest commit message
          pr_number="${{ github.event.pull_request.number }}"
          commit_message=$(git log -1 --pretty=%B)

          # Clean up newlines, escape special characters, and handle line breaks
          notes=$(echo -e "Pull Request Build: ${pr_number}\n${commit_message}" | \
            sed ':a;N;$!ba;s/\n/\\n/g' | \
            sed -e 's/[&\\/]/\\&/g')

          # Replace <CHANGES> tag content in the file
          sed -i -z -E "s/<CHANGES>(.*)<\/CHANGES>/<CHANGES>\n${notes}\n<\/CHANGES>/g" "plugins/dynamix.unraid.net.staging.plg"

      - name: Copy other release files to pr-release
        run: |
          cp archive/*.txz pr-release/
          cp plugins/pr/dynamix.unraid.net.plg pr-release/dynamix.unraid.net.plg
          cp plugins/dynamix.unraid.net.staging.plg pr-release/

      - name: Upload to Cloudflare
        uses: jakejarvis/s3-sync-action@v0.5.1
@@ -383,18 +281,13 @@ jobs:
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
          AWS_REGION: "auto"
          SOURCE_DIR: pr-release
          DEST_DIR: unraid-api/tag/${{ needs.build-plugin.outputs.tag }}
          DEST_DIR: unraid-api/pr/${{ github.event.pull_request.number }}
      - name: Comment URL
        uses: thollander/actions-comment-pull-request@v3
        with:
          comment-tag: prlink
          mode: recreate
          message: |
            This plugin has been deployed to Cloudflare R2 and is available for testing.
            Download it at this URL:
            ```
            https://preview.dl.unraid.net/unraid-api/tag/${{ needs.build-plugin.outputs.tag }}/dynamix.unraid.net.plg
            ```
            Download it at this URL: [https://preview.dl.unraid.net/unraid-api/pr/${{ github.event.pull_request.number }}/dynamix.unraid.net.staging.plg](https://preview.dl.unraid.net/unraid-api/pr/${{ github.event.pull_request.number }}/dynamix.unraid.net.staging.plg)

  release-staging:
    environment:
@@ -402,7 +295,7 @@ jobs:
    # Only release if this is a push to the main branch
    if: startsWith(github.ref, 'refs/heads/main')
    runs-on: ubuntu-latest
    needs: [test-api, build-plugin]
    needs: [build-plugin]

    steps:
      - name: Checkout repo
@@ -411,17 +304,40 @@ jobs:
      - name: Make Staging Release Folder
        run: mkdir staging-release/

      - name: Download unraid-api binary tgz
        uses: actions/download-artifact@v4
        with:
          name: unraid-api
          path: staging-release

      - name: Download plugin binary tgz
        uses: actions/download-artifact@v4
        with:
          name: connect-files

      - name: Parse Changelog
        id: changelog
        uses: ocavue/changelog-parser-action@v1
        with:
          removeMarkdown: false
          filePath: "./api/CHANGELOG.md"

      - name: Copy Files for Staging Release
        run: |
          cp archive/*.txz staging-release/
          cp plugins/staging/dynamix.unraid.net.plg staging-release/dynamix.unraid.net.plg
          cp plugins/dynamix.unraid.net.staging.plg staging-release/
          ls -al staging-release

      - name: Upload Staging Plugin to DO Spaces
        uses: BetaHuhn/do-spaces-action@v2
        with:
          access_key: ${{ secrets.DO_ACCESS_KEY }}
          secret_key: ${{ secrets.DO_SECRET_KEY }}
          space_name: ${{ secrets.DO_SPACE_NAME }}
          space_region: ${{ secrets.DO_SPACE_REGION }}
          source: staging-release
          out_dir: unraid-api

      - name: Upload Staging Plugin to Cloudflare Bucket
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
@@ -434,32 +350,34 @@ jobs:
          DEST_DIR: unraid-api

  create-draft-release:
    # Only run if release-please created a release
    if: needs.release-please.outputs.releases_created == 'true'
    # Only create new draft if this is a version tag
    if: |
      startsWith(github.ref, 'refs/tags/v')
    runs-on: ubuntu-latest
    needs: [release-please, test-api, build-plugin]
    needs: [build-plugin]

    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Download unraid-api binary tgz
        uses: actions/download-artifact@v4
        with:
          name: unraid-api

      - name: Download plugin binary tgz
        uses: actions/download-artifact@v4
        with:
          name: connect-files

      - name: Move Files to Release Folder
        run: |
          mkdir -p release/
          mv plugins/production/dynamix.unraid.net.plg release/
          mv archive/*.txz release/

      - name: Upload Release Assets
      - name: Create Github release
        uses: softprops/action-gh-release@v1
        with:
          draft: true
          prerelease: false
          files: |
            unraid-api-*.tgz
            plugins/dynamix.unraid.net*
            archive/*
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          release_name=$(gh release list --repo ${{ github.repository }} --json name,isDraft --jq '.[] | select(.isDraft == true) | .name' | head -n 1)
          # For each file in release directory
          for file in release/*; do
            echo "Uploading $file to release..."
            gh release upload "${release_name}" "$file" --clobber
          done
.github/workflows/push-staging-pr-on-close.yml (vendored) — 57 lines removed

@@ -1,57 +0,0 @@
name: Push Staging Plugin on PR Close

on:
  pull_request:
    types:
      - closed

jobs:
  push-staging:
    if: github.event.pull_request.merged == true
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Set Timezone
        uses: szenius/set-timezone@v1.2
        with:
          timezoneLinux: "America/Los_Angeles"
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          ref: refs/pull/${{ github.event.pull_request.base.ref }}/merge

      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: connect-files
          path: connect-files

      - name: Update Downloaded Staging Plugin to New Date
        run: |
          if [ ! -f "connect-files/plugins/dynamix.unraid.net.pr.plg" ]; then
            echo "ERROR: dynamix.unraid.net.pr.plg not found"
            exit 1
          fi

          plgfile="connect-files/plugins/dynamix.unraid.net.pr.plg"
          version=$(date +"%Y.%m.%d.%H%M")
          sed -i -E "s#(<!ENTITY version \").*(\">)#\1${version}\2#g" "${plgfile}" || exit 1

          # Change the plugin url to point to staging
          url="https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
          sed -i -E "s#(<!ENTITY pluginURL \").*(\">)#\1${url}\2#g" "${plgfile}" || exit 1
          cat "${plgfile}"
          mkdir -p pr-release
          mv "${plgfile}" pr-release/dynamix.unraid.net.plg

      - name: Upload to Cloudflare
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
          AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
          AWS_REGION: "auto"
          SOURCE_DIR: pr-release
          DEST_DIR: unraid-api/pr/${{ github.event.pull_request.number }}
.github/workflows/release-production.yml (vendored) — 3 changes

@@ -33,6 +33,7 @@ jobs:
          )
          escapedNotes=$(sed -e 's/[&\\/]/\\&/g; s/$/\\/' -e '$s/\\$//' <<<"$notes")
          sed -i -z -E "s/<CHANGES>(.*)<\/CHANGES>/<CHANGES>\n${escapedNotes}\n<\/CHANGES>/g" "dynamix.unraid.net.plg"
          sed -i -z -E "s/<CHANGES>(.*)<\/CHANGES>/<CHANGES>\n${escapedNotes}\n<\/CHANGES>/g" "dynamix.unraid.net.staging.plg"

      - name: Upload All Release Files to DO Spaces
        uses: BetaHuhn/do-spaces-action@v2
@@ -53,4 +54,4 @@ jobs:
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
          AWS_REGION: 'auto'
          SOURCE_DIR: "."
          DEST_DIR: unraid-api
          DEST_DIR: unraid-api
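As an aside, both workflows above splice release notes into the plugin's `<CHANGES>` XML element with the same sed pattern. A minimal standalone sketch of that rewrite — assuming GNU sed (for the `-z` flag) and a hypothetical `demo.plg` — looks like this:

```sh
# Hypothetical demo of the <CHANGES> rewrite used in the workflows above.
# Multi-line notes are flattened and escaped so they can be spliced into the
# sed replacement string without terminating it early.
notes=$(printf 'Release notes line 1\nfix: escape & and / correctly')
escaped=$(printf '%s' "$notes" | sed ':a;N;$!ba;s/\n/\\n/g' | sed -e 's/[&\\/]/\\&/g')

printf '<PLUGIN>\n<CHANGES>\nold notes\n</CHANGES>\n</PLUGIN>\n' > demo.plg
# -z treats the file as a single record, so (.*) can span newlines.
sed -i -z -E "s/<CHANGES>(.*)<\/CHANGES>/<CHANGES>\n${escaped}\n<\/CHANGES>/g" demo.plg
cat demo.plg
```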
.github/workflows/test-libvirt.yml (vendored) — 71 lines removed

@@ -1,71 +0,0 @@
name: Test Libvirt
on:
  push:
    branches:
      - main
    paths:
      - "libvirt/**"
  pull_request:
    paths:
      - "libvirt/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  build:
    runs-on: ubuntu-latest

    defaults:
      run:
        working-directory: ./libvirt

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive

      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
        with:
          packages: libvirt-dev
          version: 1.0

      - name: Set Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('libvirt/package.json') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: pnpm install
        run: pnpm install --frozen-lockfile

      - name: Build
        run: pnpm run build

      - name: test
        run: pnpm run test
.gitignore (vendored) — 10 changes

@@ -24,7 +24,6 @@ build/Release
# Dependency directories
node_modules/
jspm_packages/
unraid-ui/node_modules/

# TypeScript v1 declaration files
typings/
@@ -52,7 +51,6 @@ typings/

# Visual Studio Code workspace
.vscode/sftp.json
.history/

# OSX
.DS_Store
@@ -65,7 +63,6 @@ test/__temp__/*

# Built files
dist
unraid-ui/storybook-static

# Typescript
typescript
@@ -76,7 +73,7 @@ typescript
# Github actions
RELEASE_NOTES.md

# Docker Deploy Folder
# Docker Deploy Folder
deploy/*
!deploy/.gitkeep

@@ -91,7 +88,4 @@ deploy/*
.env*
!.env.example

fb_keepalive

# pnpm store
.pnpm-store
fb_keepalive

@@ -1 +1 @@
{"api":"4.1.2","web":"4.1.2","unraid-ui":"4.1.2","plugin":"4.1.2"}
{"api":"3.10.0","web":"3.10.0"}
.vscode/extensions.json (vendored) — 2 changes

@@ -1,6 +1,8 @@
{
    "recommendations": [
        "natizyskunk.sftp",
        "davidanson.vscode-markdownlint",
        "bmewburn.vscode-intelephense-client",
        "foxundermoon.shell-format",
        "timonwong.shellcheck",
        "esbenp.prettier-vscode"
.vscode/settings.json (vendored) — 47 changes

@@ -1,15 +1,34 @@
{
    "files.associations": {
        "*.page": "php"
    },
    "editor.codeActionsOnSave": {
        "source.fixAll": "never",
        "source.fixAll.eslint": "explicit"
    },
    "i18n-ally.localesPaths": [
        "locales"
    ],
    "i18n-ally.keystyle": "flat",
    "eslint.experimental.useFlatConfig": true
}

    "files.associations": {
        "*.page": "php"
    },
    "editor.codeActionsOnSave": {
        "source.fixAll": "never",
        "source.fixAll.eslint": "explicit"
    },
    "workbench.colorCustomizations": {
        "activityBar.activeBackground": "#78797d",
        "activityBar.background": "#78797d",
        "activityBar.foreground": "#e7e7e7",
        "activityBar.inactiveForeground": "#e7e7e799",
        "activityBarBadge.background": "#df9fac",
        "activityBarBadge.foreground": "#15202b",
        "commandCenter.border": "#e7e7e799",
        "sash.hoverBorder": "#78797d",
        "statusBar.background": "#5f6063",
        "statusBar.foreground": "#e7e7e7",
        "statusBarItem.hoverBackground": "#78797d",
        "statusBarItem.remoteBackground": "#5f6063",
        "statusBarItem.remoteForeground": "#e7e7e7",
        "titleBar.activeBackground": "#5f6063",
        "titleBar.activeForeground": "#e7e7e7",
        "titleBar.inactiveBackground": "#5f606399",
        "titleBar.inactiveForeground": "#e7e7e799"
    },
    "peacock.color": "#5f6063",
    "i18n-ally.localesPaths": [
        "locales"
    ],
    "i18n-ally.keystyle": "flat",
    "eslint.experimental.useFlatConfig": true,
}
.vscode/sftp-template.json (vendored) — 1 change

@@ -19,4 +19,3 @@
        ".DS_Store"
    ]
}
@@ -1,61 +0,0 @@
# Contributing to Unraid Connect

Thank you for your interest in contributing to Unraid Connect! We want to make contributing to this project as easy and transparent as possible, whether it's:

- Reporting a bug
- Discussing the current state of the code
- Submitting a fix
- Proposing new features

## Development Process

We use GitHub to host code, to track issues and feature requests, as well as accept pull requests.

### 1. Work Intent Process

**Before starting any development work**, you must submit a Work Intent and have it approved:

1. **Create a Work Intent**
   - Go to [Issues → New Issue → Work Intent](https://github.com/unraid/api/issues/new?template=work_intent.md)
   - Fill out the brief template describing what you want to work on
   - The issue will be automatically labeled as `work-intent` and `unapproved`

2. **Wait for Approval**
   - A core developer will review your Work Intent
   - They may ask questions or suggest changes
   - Once approved, the `unapproved` label will be removed

3. **Begin Development**
   - Only start coding after your Work Intent is approved
   - Follow the approach outlined in your approved Work Intent
   - Reference the Work Intent in your future PR

### 2. Making Changes

1. Fork the repo and create your branch from `main`
2. Make your changes
3. Ensure your commits are clear and descriptive
4. Keep your changes focused - solve one thing at a time

### 3. Pull Request Process

1. Create a pull request from your fork to our `main` branch
2. Reference the approved Work Intent in your PR description
3. Ensure the PR description clearly describes the problem and solution
4. Include screenshots or examples if applicable
5. Wait for review from the core team

**Note:** Direct pushes to the main branch are not allowed. All changes must go through the PR process.

## Bug Reports and Feature Requests

We use GitHub issues to track bugs and feature requests:

- **Bug Report**: Use the [Bug Report Template](https://github.com/unraid/api/issues/new?template=bug_report.md)
- **Feature Request**: Use the [Feature Request Template](https://github.com/unraid/api/issues/new?template=feature_request.md)

For Unraid Connect specific issues (Flash Backup, connect.myunraid.net, mothership connectivity), please submit through our support portal instead.

## License

By contributing, you agree that your contributions will be licensed under the same terms as the main project.
@@ -1,7 +1,5 @@
PATHS_UNRAID_DATA=./dev/data # Where we store plugin data (e.g. permissions.json)
PATHS_STATES=./dev/states # Where .ini files live (e.g. vars.ini)
PATHS_AUTH_SESSIONS=./dev/sessions # Where user sessions live
PATHS_AUTH_KEY=./dev/keys # Auth key directory
PATHS_DYNAMIX_BASE=./dev/dynamix # Dynamix's data directory
PATHS_DYNAMIX_CONFIG_DEFAULT=./dev/dynamix/default.cfg # Dynamix's default config file, which ships with unraid
PATHS_DYNAMIX_CONFIG=./dev/dynamix/dynamix.cfg # Dynamix's config file
@@ -1,13 +1,11 @@
VERSION="THIS_WILL_BE_REPLACED_WHEN_BUILT"

PATHS_UNRAID_DATA=./dev/data # Where we store plugin data (e.g. permissions.json)
PATHS_STATES=./dev/states # Where .ini files live (e.g. vars.ini)
PATHS_AUTH_SESSIONS=./dev/sessions # Where user sessions live
PATHS_AUTH_KEY=./dev/keys # Auth key directory
PATHS_DYNAMIX_BASE=./dev/dynamix # Dynamix's data directory
PATHS_DYNAMIX_CONFIG_DEFAULT=./dev/dynamix/default.cfg # Dynamix's default config file, which ships with unraid
PATHS_DYNAMIX_CONFIG=./dev/dynamix/dynamix.cfg # Dynamix's config file
PATHS_MY_SERVERS_CONFIG=./dev/Unraid.net/myservers.cfg # My servers config file
PATHS_MY_SERVERS_FB=./dev/Unraid.net/fb_keepalive # My servers flashbackup timekeeper file
PATHS_KEYFILE_BASE=./dev/Unraid.net # Keyfile location
PATHS_MACHINE_ID=./dev/data/machine-id
PORT=5000
NODE_ENV="test"
NODE_ENV=test
@@ -1,13 +1,9 @@

import type { Linter } from 'eslint';
import eslint from '@eslint/js';
import noRelativeImportPaths from 'eslint-plugin-no-relative-import-paths';
import prettier from 'eslint-plugin-prettier';
import tseslint from 'typescript-eslint';

export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.recommended, {
    plugins: {
        'no-relative-import-paths': noRelativeImportPaths,
        prettier: prettier,
    },
    rules: {
        '@typescript-eslint/no-redundant-type-constituents': 'off',
        '@typescript-eslint/no-unsafe-call': 'off',
@@ -21,15 +17,5 @@ export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.r
        'no-multiple-empty-lines': ['error', { max: 1, maxBOF: 0, maxEOF: 1 }],
        '@typescript-eslint/no-unused-vars': 'off',
        '@typescript-eslint/no-unused-expressions': 'off',
        'import/no-unresolved': 'off',
        'import/extensions': 'off',
        'import/no-absolute-path': 'off',
        'import/prefer-default-export': 'off',
        'no-relative-import-paths/no-relative-import-paths': [
            'error',
            { allowSameFolder: false, rootDir: 'src', prefix: '@app' },
        ],
        'prettier/prettier': 'error',
    },
    ignores: ['src/graphql/generated/client/**/*'],
});
api/.gitignore (vendored) — 2 changes

@@ -80,5 +80,3 @@ deploy/*

# IDE Settings Files
.idea

!**/*.login.*
api/.nvmrc — new file, 1 line

@@ -0,0 +1 @@
v20
@@ -1,7 +0,0 @@
!src/*

# Downloaded Fixtures (For File Modifications)
src/unraid-api/unraid-file-modifier/modifications/__fixtures__/downloaded/*

# Generated Types
src/graphql/generated/client/*.ts
api/.vscode/settings.json (vendored) — 7 lines removed

@@ -1,7 +0,0 @@
{
    "eslint.lintTask.options": "--flag unstable_ts_config",
    "eslint.options": {
        "flags": ["unstable_ts_config"],
        "overrideConfigFile": ".eslintrc.ts"
    }
}
api/CHANGELOG.md — 2793 changes (file diff suppressed because it is too large)
@@ -19,18 +19,15 @@ WORKDIR /app

# Set app env
ENV NODE_ENV=development
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"

# Install pnpm
RUN corepack enable && corepack prepare pnpm@8.15.4 --activate && npm i -g npm@latest
COPY tsconfig.json .eslintrc.ts .npmrc .env.production .env.staging ./

COPY tsconfig.json .eslintrc.ts .prettierrc.cjs .npmrc .env.production .env.staging package.json pnpm-lock.yaml .npmrc ./
COPY package.json package-lock.json ./

# Install deps
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN npm i

EXPOSE 3001
EXPOSE 4000

###########################################################
# Builder Image
@@ -42,4 +39,4 @@ ENV NODE_ENV=production

COPY . .

CMD ["pnpm", "run", "build-and-pack"]
CMD ["npm", "run", "build-and-pack"]
@@ -17,11 +17,11 @@ root@Devon:~# unraid-api

Unraid API

Thanks for using the official Unraid API
Thanks for using the official Unraid API

Usage:

  $ unraid-api command <options>
  $ unraid-api command <options>

Commands:

@@ -29,48 +29,34 @@ Commands:

Options:

  -h, --help                                                  Prints this usage guide.
  -d, --debug                                                 Enabled debug mode.
  -p, --port string                                           Set the graphql port.
  --environment production/staging/development                Set the working environment.
  --log-level ALL/TRACE/DEBUG/INFO/WARN/ERROR/FATAL/MARK/OFF  Set the log level.
  -h, --help                                                  Prints this usage guide.
  -d, --debug                                                 Enabled debug mode.
  -p, --port string                                           Set the graphql port.
  --environment production/staging/development                Set the working environment.
  --log-level ALL/TRACE/DEBUG/INFO/WARN/ERROR/FATAL/MARK/OFF  Set the log level.

Copyright © 2024 Lime Technology, Inc.

```

## Key

To create and work with Unraid API keys, used for the local API, run the following command to view all available options. These options may change over time.

```sh
unraid-api key --help
```

## Report

To view the current status of the unraid-api and its connection to mothership, run:

```sh
```
unraid-api report
```

To view verbose data (anonymized), run:

```sh
```
unraid-api report -v
```

To view non-anonymized verbose data, run:

```sh
```
unraid-api report -vv
```

## Secrets

If you found this file you're likely a developer. If you'd like to know more about the API and when it's available please join [our discord](https://discord.unraid.net/).

## License

Copyright Lime Technology Inc. All rights reserved.
api/codegen.ts — 105 lines removed

@@ -1,105 +0,0 @@
import type { CodegenConfig } from '@graphql-codegen/cli';

const config: CodegenConfig = {
    overwrite: true,
    emitLegacyCommonJSImports: false,
    verbose: true,
    config: {
        namingConvention: {
            typeNames: './fix-array-type.cjs',
            enumValues: 'change-case#upperCase',
            useTypeImports: true,
        },
        scalars: {
            DateTime: 'string',
            Long: 'number',
            JSON: '{ [key: string]: any }',
            URL: 'URL',
            Port: 'number',
            UUID: 'string',
        },
    },
    generates: {
        'src/graphql/generated/client/': {
            documents: './src/graphql/mothership/*.ts',
            schema: {
                [process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
                    headers: {
                        origin: 'https://forums.unraid.net',
                    },
                },
            },
            preset: 'client',
            presetConfig: {
                gqlTagName: 'graphql',
            },
            config: {
                useTypeImports: true,
                withObjectType: true,
            },
            plugins: [{ add: { content: '/* eslint-disable */' } }],
        },
        // Generate Types for the API Server
        'src/graphql/generated/api/types.ts': {
            schema: ['./src/graphql/types.ts', './src/graphql/schema/types/**/*.graphql'],
            plugins: [
                'typescript',
                'typescript-resolvers',
                { add: { content: '/* eslint-disable */' } },
            ],
            config: {
                contextType: '@app/graphql/schema/utils#Context',
                useIndexSignature: true,
            },
        },
        // Generate Operations for any built-in API Server Operations (e.g., report.ts)
        'src/graphql/generated/api/operations.ts': {
            documents: './src/graphql/client/api/*.ts',
            schema: ['./src/graphql/types.ts', './src/graphql/schema/types/**/*.graphql'],
            preset: 'import-types',
            presetConfig: {
                typesPath: '@app/graphql/generated/api/types',
            },
            plugins: [
                'typescript-validation-schema',
                'typescript-operations',
                'typed-document-node',
                { add: { content: '/* eslint-disable */' } },
            ],
            config: {
                importFrom: '@app/graphql/generated/api/types',
                strictScalars: false,
                schema: 'zod',
                withObjectType: true,
            },
        },
        'src/graphql/generated/client/validators.ts': {
            schema: {
                [process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
                    headers: {
                        origin: 'https://forums.unraid.net',
                    },
                },
            },
            plugins: [
                'typescript-validation-schema',
                { add: { content: '/* eslint-disable */' } },
            ],
            config: {
                importFrom: '@app/graphql/generated/client/graphql',
                strictScalars: false,
                schema: 'zod',
            },
        },
    },
};

export default config;
api/codegen.yml — new file, 77 lines

@@ -0,0 +1,77 @@
overwrite: true
emitLegacyCommonJSImports: false
verbose: true
require:
  - ts-node/register
config:
  namingConvention:
    typeNames: './fix-array-type.cjs'
    enumValues: 'change-case#upperCase'
    useTypeImports: true
  scalars:
    DateTime: string
    Long: number
    JSON: "{ [key: string]: any }"
    URL: URL
    Port: number
    UUID: string

generates:
  src/graphql/generated/client/:
    documents: './src/graphql/mothership/*.ts'
    schema:
      '${MOTHERSHIP_GRAPHQL_LINK}':
        headers:
          origin: 'https://forums.unraid.net'
    preset: client
    presetConfig:
      gqlTagName: graphql
    config:
      useTypeImports: true
      withObjectType: true
    plugins:
      - add: { content: '/* eslint-disable */' }

  # Generate Types for the API Server
  src/graphql/generated/api/types.ts:
    schema:
      - './src/graphql/types.ts'
      - './src/graphql/schema/types/**/*.graphql'
    plugins:
      - typescript
      - typescript-resolvers
      - add: { content: '/* eslint-disable */' }
    config:
      contextType: '@app/graphql/schema/utils#Context'
      useIndexSignature: true
  # Generate Operations for any built in API Server Operations (ie report.ts)
  src/graphql/generated/api/operations.ts:
    documents: './src/graphql/client/api/*.ts'
    schema:
      - './src/graphql/types.ts'
      - './src/graphql/schema/types/**/*.graphql'
    preset: import-types
    presetConfig:
      typesPath: '@app/graphql/generated/api/types'
    plugins:
      - typescript-validation-schema
      - typescript-operations
      - typed-document-node
      - add: { content: '/* eslint-disable */' }
    config:
      importFrom: '@app/graphql/generated/api/types'
      strictScalars: false
      schema: 'zod'
      withObjectType: true
  src/graphql/generated/client/validators.ts:
    schema:
      '${MOTHERSHIP_GRAPHQL_LINK}':
        headers:
          origin: 'https://forums.unraid.net'
    plugins:
      - typescript-validation-schema
      - add: { content: '/* eslint-disable */' }
    config:
      importFrom: '@app/graphql/generated/client/graphql'
      strictScalars: false
      schema: 'zod'
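The YAML config above resolves `${MOTHERSHIP_GRAPHQL_LINK}` from the environment at run time, where the deleted TypeScript config read `process.env` directly. A plausible invocation — assuming `@graphql-codegen/cli` and the listed plugins are installed, and using a placeholder endpoint URL — would be:

```sh
# Hypothetical invocation; graphql-codegen interpolates ${MOTHERSHIP_GRAPHQL_LINK}
# inside codegen.yml when the variable is set in the environment.
cd api
MOTHERSHIP_GRAPHQL_LINK="https://example.invalid/graphql" \
  npx graphql-codegen --config codegen.yml
```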
@@ -1,20 +1,21 @@
[api]
version="4.0.1"
version="3.11.0"
extraOrigins="https://google.com,https://test.com"
[local]
sandbox="yes"
[notifier]
apikey="unnotify_30994bfaccf839c65bae75f7fa12dd5ee16e69389f754c3b98ed7d5"
[remote]
wanaccess="yes"
wanport="8443"
upnpEnabled="no"
apikey="_______________________BIG_API_KEY_HERE_________________________"
localApiKey="_______________________LOCAL_API_KEY_HERE_________________________"
email="test@example.com"
username="zspearmint"
avatar="https://via.placeholder.com/200"
regWizTime="1611175408732_0951-1653-3509-FBA155FA23C0"
accesstoken=""
idtoken=""
accesstoken=""
refreshtoken=""
dynamicRemoteAccessType="DISABLED"
ssoSubIds=""
[upc]
apikey="unupc_fab6ff6ffe51040595c6d9ffb63a353ba16cc2ad7d93f813a2e80a5810"
api/dev/data/permissions.json — new file, 191 lines

@@ -0,0 +1,191 @@
{
    "admin": {
        "extends": "user",
        "permissions": [
            { "resource": "apikey", "action": "read:any", "attributes": "*" },
            { "resource": "array", "action": "read:any", "attributes": "*" },
            { "resource": "cpu", "action": "read:any", "attributes": "*" },
            { "resource": "device", "action": "read:any", "attributes": "*" },
            { "resource": "device/unassigned", "action": "read:any", "attributes": "*" },
            { "resource": "disk", "action": "read:any", "attributes": "*" },
            { "resource": "disk/settings", "action": "read:any", "attributes": "*" },
            { "resource": "display", "action": "read:any", "attributes": "*" },
            { "resource": "docker/container", "action": "read:any", "attributes": "*" },
            { "resource": "docker/network", "action": "read:any", "attributes": "*" },
            { "resource": "info", "action": "read:any", "attributes": "*" },
            { "resource": "license-key", "action": "read:any", "attributes": "*" },
            { "resource": "machine-id", "action": "read:any", "attributes": "*" },
            { "resource": "memory", "action": "read:any", "attributes": "*" },
            { "resource": "notifications", "action": "read:any", "attributes": "*" },
            { "resource": "online", "action": "read:any", "attributes": "*" },
            { "resource": "os", "action": "read:any", "attributes": "*" },
            { "resource": "parity-history", "action": "read:any", "attributes": "*" },
            { "resource": "permission", "action": "read:any", "attributes": "*" },
            { "resource": "servers", "action": "read:any", "attributes": "*" },
            { "resource": "service", "action": "read:any", "attributes": "*" },
            { "resource": "service/emhttpd", "action": "read:any", "attributes": "*" },
            { "resource": "service/unraid-api", "action": "read:any", "attributes": "*" },
            { "resource": "services", "action": "read:any", "attributes": "*" },
            { "resource": "share", "action": "read:any", "attributes": "*" },
            { "resource": "software-versions", "action": "read:any", "attributes": "*" },
            { "resource": "unraid-version", "action": "read:any", "attributes": "*" },
            { "resource": "user", "action": "read:any", "attributes": "*" },
            { "resource": "var", "action": "read:any", "attributes": "*" },
            { "resource": "vars", "action": "read:any", "attributes": "*" },
            { "resource": "vm/domain", "action": "read:any", "attributes": "*" },
            { "resource": "vm/network", "action": "read:any", "attributes": "*" }
        ]
    },
    "user": {
        "extends": "guest",
        "permissions": [
            { "resource": "apikey", "action": "read:own", "attributes": "*" },
            { "resource": "permission", "action": "read:any", "attributes": "*" }
        ]
    },
    "guest": {
        "permissions": [
            { "resource": "welcome", "action": "read:any", "attributes": "*" }
        ]
    }
}
@@ -1,6 +1,5 @@
[display]
date="%c"
time="%I:%M %p"
number=".,"
scale="-1"
tabs="1"
@@ -1,11 +0,0 @@
{
    "createdAt": "2025-01-27T16:22:56.501Z",
    "description": "API key for Connect user",
    "id": "b5b4aa3d-8e40-4c92-bc40-d50182071886",
    "key": "_______________________LOCAL_API_KEY_HERE_________________________",
    "name": "Connect",
    "permissions": [],
    "roles": [
        "connect"
    ]
}
@@ -0,0 +1,5 @@
timestamp=1683971161
event=Unraid Parity check
subject=Notice [UNRAID] - Parity check finished (0 errors)
description=Canceled
importance=warning
@@ -1 +0,0 @@
unraid_login|i:1736523078;unraid_user|s:4:"root";locale|s:0:"";buildDate|s:8:"20241202";
@@ -1,24 +1,24 @@
[api]
version="4.0.1"
version="3.11.0"
extraOrigins="https://google.com,https://test.com"
[local]
sandbox="yes"
[notifier]
apikey="unnotify_30994bfaccf839c65bae75f7fa12dd5ee16e69389f754c3b98ed7d5"
[remote]
wanaccess="yes"
wanport="8443"
upnpEnabled="no"
apikey="_______________________BIG_API_KEY_HERE_________________________"
localApiKey="_______________________LOCAL_API_KEY_HERE_________________________"
email="test@example.com"
username="zspearmint"
avatar="https://via.placeholder.com/200"
regWizTime="1611175408732_0951-1653-3509-FBA155FA23C0"
accesstoken=""
idtoken=""
accesstoken=""
refreshtoken=""
dynamicRemoteAccessType="DISABLED"
ssoSubIds=""
allowedOrigins="/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, http://localhost:8080, https://localhost:4443, https://tower.local:4443, https://192.168.1.150:4443, https://tower:4443, https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443, https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443, https://10-252-0-1.hash.myunraid.net:4443, https://10-252-1-1.hash.myunraid.net:4443, https://10-253-3-1.hash.myunraid.net:4443, https://10-253-4-1.hash.myunraid.net:4443, https://10-253-5-1.hash.myunraid.net:4443, https://10-100-0-1.hash.myunraid.net:4443, https://10-100-0-2.hash.myunraid.net:4443, https://10-123-1-2.hash.myunraid.net:4443, https://221-123-121-112.hash.myunraid.net:4443, https://google.com, https://test.com, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000, https://studio.apollographql.com"
dynamicRemoteAccessType="DISABLED"
[upc]
apikey="unupc_fab6ff6ffe51040595c6d9ffb63a353ba16cc2ad7d93f813a2e80a5810"
[connectionStatus]
minigraph="PRE_INIT"
upnpStatus=""
minigraph="ERROR_RETRYING"
@@ -1,12 +1,27 @@
x-common: &common
version: '3.8'

x-volumes: &volumes
  volumes:
    - ./:/app
    - pnpm-store:/pnpm/store
    - ../libvirt:/libvirt
  environment:
    - IS_DOCKER=true
    - GIT_SHA=${GIT_SHA:-unknown}
    - IS_TAGGED=${IS_TAGGED:-false}
    - ./dev:/app/dev
    - ./src:/app/src
    - ./package.json:/app/package.json
    - ./package-lock.json:/app/package-lock.json
    - ./tsconfig.json:/app/tsconfig.json
    - ./vite.config.ts:/app/vite.config.ts
    - ./dist/:/app/dist/
    - ./deploy/:/app/deploy/
    - ./README.md:/app/README.md
    - ./scripts/:/app/scripts/
    - ../.git/:/app/.git/
    - ./.env.production:/app/.env.production
    - ./.env.staging:/app/.env.staging
    - ./.env.test:/app/.env.test
    - ./.env.development:/app/.env.development
    - ./codegen.yml:/app/codegen.yml
    - ./fix-array-type.cjs:/app/fix-array-type.cjs
    - /var/run/docker.sock:/var/run/docker.sock
    - ./unraid-api.js:/app/unraid-api.js
    - ./ecosystem.config.json:/app/ecosystem.config.json

services:

@@ -18,10 +33,14 @@ services:
      context: .
      target: development
      dockerfile: Dockerfile
    <<: *common
    <<: *volumes
    stdin_open: true
    tty: true
    entrypoint: /bin/bash
    environment:
      - IS_DOCKER=true
      - GIT_SHA=${GIT_SHA:?err}
      - IS_TAGGED=${IS_TAGGED}
    profiles:
      - builder

@@ -33,23 +52,24 @@ services:
      context: .
      target: development
      dockerfile: Dockerfile
    <<: *common
    <<: *volumes
    command: npm run start:dev
    environment:
      - IS_DOCKER=true
      - GIT_SHA=${GIT_SHA:?err}
      - IS_TAGGED=${IS_TAGGED}
    profiles:
      - builder

  builder:
    image: unraid-api:builder
    environment:
      - GIT_SHA=${GIT_SHA:?err}
      - IS_TAGGED=${IS_TAGGED}
    build:
      context: .
      target: builder
      dockerfile: Dockerfile
    <<: *common
    <<: *volumes
    profiles:
      - builder

volumes:
  pnpm-store:
    name: "pnpm-store"
  pnpm-cache:
    name: "pnpm-cache"
      - builder
@@ -1,82 +0,0 @@
# Repository Organization

This document describes the high-level architecture of the Unraid API repository.

## Overview

The repository consists of:

- API Server (NestJS)
- Redux Store
- Core Modules
- Tests

## API Server Architecture

The API server is built with NestJS and provides the core functionality for interacting with Unraid systems.

### Key Components:

- `src/unraid-api/` - Core NestJS implementation
- `src/core/` - Legacy business logic and utilities
- `src/store/` - Redux store and state management
- `src/common/` - Shared utilities and types

## Redux Store

The store manages application state through several modules:

### Store Modules

- `config` - User settings, authentication, and API configuration
- `emhttp` - Unraid system and array state
- `registration` - License management
- `cache` - Application caching
- `docker` - Container management
- `upnp` - UPnP functionality
- `dynamix` - Plugin state
- `minigraph` - Mothership connectivity
- `notifications` - System notifications

### Store Listeners

Key listeners that handle side effects:

- Array state changes
- Configuration updates
- Remote access changes
- Server state updates
- UPnP changes
- WAN access changes

### Store Synchronization

The store syncs data in two ways:

- Flash Storage - Persistent configuration
- Memory Storage - Runtime state

## Project Structure

The repository is organized into several packages:

- `api/` - NestJS API server
- `plugin/` - Unraid plugin package
- `web/` - Frontend application
- `unraid-ui/` - Shared UI components

## Development Flow

New development should focus on the NestJS implementation in `src/unraid-api/`:

1. Create new features in `src/unraid-api/` using NestJS patterns
2. Use dependency injection and NestJS modules
3. Legacy code in `src/core/` should be gradually migrated
4. State management still uses the Redux store when needed

## Best Practices

1. Follow NestJS architectural patterns
2. Use TypeScript decorators and strong typing
3. Implement proper dependency injection
4. Write unit tests for new services
34 api/docs/developing-for-the-api.md Normal file
@@ -0,0 +1,34 @@
# How to enable introspection and view possible API endpoints for the Unraid API

1. Install the API on your machine
2. Stop the running API with `unraid-api stop`
3. Enable an allowed origin for Apollo Studio:

   - Edit the file at `/boot/config/plugins/dynamix.my.servers/myservers.cfg`
   - Add the line `extraOrigins="studio.apollographql.com"` inside the `[api]` section

   ```ini
   [api]
   ...
   extraOrigins="studio.apollographql.com"
   [local]
   ...rest
   ```

   - Also copy out the `[upc] -> apikey` setting; it should look something like `unupc_52e45431703b1e79cef709bfaf7ddc469bafc12e091b7c9bca0f6e96dc`

4. Enable introspection:

   ```sh
   INTROSPECTION=true LOG_LEVEL=trace LOG_TYPE=pretty unraid-api start --debug
   ```

   - If you run this command and it says the Unraid API is already started, run `unraid-api stop` before trying it again.

5. Introspect your server with Apollo Sandbox:
   - Navigate your *Chrome* browser to [Apollo Sandbox](https://studio.apollographql.com/sandbox/explorer/)
   - Click the settings icon in the top right corner and set the URL to your server's URL plus `/graphql`. For example, a server URL might be `https://192-168-1-3.277ace5dd0892eacd83f517b39fb3d1dd32078b5.myunraid.net:8443/graphql` or `http://tower.local/graphql`
   - Also set the API key in the header section: use `x-api-key` as the key and the API key you copied in step 3 as the value.
   

6. Now that your server is connected, you should see the schema populate. To build queries, click the plus icon next to a field on the left side to add it to a query, then click the run icon on the right.
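As a quick sanity check that introspection is actually enabled, you can hit the endpoint directly before wiring up Apollo Sandbox. This is a minimal sketch, not part of the official docs; the server URL and API key are placeholders you must substitute with your own values.

```ts
// check-introspection.ts — minimal sketch; requires Node 18+ for global fetch.
// SERVER_URL and API_KEY are placeholders, not real values.
const SERVER_URL = 'https://tower.local/graphql';
const API_KEY = 'unupc_...'; // the [upc] apikey copied in step 3

async function checkIntrospection(): Promise<void> {
    const response = await fetch(SERVER_URL, {
        method: 'POST',
        headers: {
            'content-type': 'application/json',
            'x-api-key': API_KEY,
        },
        body: JSON.stringify({
            query: '{ __schema { queryType { name } } }',
        }),
    });
    const result = await response.json();
    // With introspection enabled this prints { queryType: { name: "Query" } };
    // otherwise the response contains an errors array instead.
    console.log(JSON.stringify(result.data ?? result.errors, null, 2));
}

checkIntrospection().catch(console.error);
```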
@@ -1,4 +0,0 @@
{
    "label": "Unraid API",
    "position": 4
}
@@ -1,162 +0,0 @@
# CLI Commands

### Start

```bash
unraid-api start [--log-level <level>]
```

Starts the Unraid API service.

Options:

- `--log-level`: Set logging level (trace|debug|info|warn|error)

### Stop

```bash
unraid-api stop [--delete]
```

Stops the Unraid API service.

- `--delete`: Optional. Delete the PM2 home directory

### Restart

```bash
unraid-api restart
```

Restarts the Unraid API service.

### Logs

```bash
unraid-api logs [-l <lines>]
```

View the API logs.

- `-l, --lines`: Optional. Number of lines to tail (default: 100)

## Configuration Commands

### Config

```bash
unraid-api config
```

Displays current configuration values.

### Switch Environment

```bash
unraid-api switch-env [-e <environment>]
```

Switch between production and staging environments.

- `-e, --environment`: Optional. Target environment (production|staging)

### Developer Mode

```bash
unraid-api developer
```

Configure developer features for the API (e.g., GraphQL sandbox).

## API Key Management

### API Key Commands

```bash
unraid-api apikey [options]
```

Create and manage API keys with the options below; a scripted example follows the list.

Options:

- `--name <name>`: Name of the key
- `--create`: Create a new key
- `-r, --roles <roles>`: Comma-separated list of roles
- `-p, --permissions <permissions>`: Comma-separated list of permissions
- `-d, --description <description>`: Description for the key
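The same flags can be driven from a script. Here is a hedged sketch using execa (already a dependency of this repo) to create a key non-interactively; the flag spelling follows the option list above, but the assumption that the CLI prints the generated key to stdout is mine, not documented.

```ts
// create-key.ts — illustrative sketch only; assumes `unraid-api` is on PATH.
import { execa } from 'execa';

async function createApiKey(name: string, roles: string[]): Promise<string> {
    // Flags mirror the documented options: --create, --name, -r/--roles, -d/--description.
    const { stdout } = await execa('unraid-api', [
        'apikey',
        '--create',
        '--name', name,
        '--roles', roles.join(','),
        '--description', `Key for ${name}`,
    ]);
    // Assumption: the CLI prints the generated key to stdout.
    return stdout.trim();
}

createApiKey('monitoring', ['guest']).then(console.log).catch(console.error);
```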
## SSO (Single Sign-On) Management

### SSO Base Command

```bash
unraid-api sso
```

#### Add SSO User

```bash
unraid-api sso add-user
# or
unraid-api sso add
# or
unraid-api sso a
```

Add a new user for SSO authentication.

#### Remove SSO User

```bash
unraid-api sso remove-user
# or
unraid-api sso remove
# or
unraid-api sso r
```

Remove a user (or all users) from SSO.

#### List SSO Users

```bash
unraid-api sso list-users
# or
unraid-api sso list
# or
unraid-api sso l
```

List all configured SSO users.

#### Validate SSO Token

```bash
unraid-api sso validate-token <token>
# or
unraid-api sso validate
# or
unraid-api sso v
```

Validates an SSO token and returns its status.

## Report Generation

### Generate Report

```bash
unraid-api report [-r] [-j]
```

Generate a system report.

- `-r, --raw`: Display raw command output
- `-j, --json`: Display output in JSON format

## Notes

1. Most commands require appropriate permissions to modify system state
2. Some commands require the API to be running or stopped
3. Store API keys securely as they provide system access
4. SSO configuration changes may require a service restart
@@ -1,202 +0,0 @@
# Using the Unraid API

The Unraid API provides a GraphQL interface that allows you to interact with your Unraid server. This guide will help you get started with exploring and using the API.

## Enabling the GraphQL Sandbox

1. First, enable developer mode using the CLI:

    ```bash
    unraid-api developer
    ```

2. Follow the prompts to enable the sandbox. This will allow you to access the Apollo Sandbox interface.

3. Access the GraphQL playground by navigating to:

    ```txt
    http://YOUR_SERVER_IP/graphql
    ```

## Authentication

Most queries and mutations require authentication. You can authenticate using either:

1. API keys
2. Cookies (the default method when signed in to the WebGUI)

### Creating an API Key

Use the CLI to create an API key:

```bash
unraid-api apikey --create
```

Follow the prompts to set:

- Name
- Description
- Roles
- Permissions

The generated API key should be included in your GraphQL requests as a header:

```json
{
    "x-api-key": "YOUR_API_KEY"
}
```
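Putting that header to use, here is a minimal sketch of a GraphQL request over plain HTTP (Node 18+). The endpoint and key are placeholders, and the `online` field queried at the end is an assumption inferred from the permissions data above, not a documented schema guarantee.

```ts
// query-online.ts — minimal sketch; endpoint and key are placeholders.
const endpoint = 'http://YOUR_SERVER_IP/graphql';

async function queryApi<T>(query: string): Promise<T> {
    const res = await fetch(endpoint, {
        method: 'POST',
        headers: {
            'content-type': 'application/json',
            'x-api-key': 'YOUR_API_KEY',
        },
        body: JSON.stringify({ query }),
    });
    if (!res.ok) {
        throw new Error(`HTTP ${res.status}`);
    }
    const { data, errors } = await res.json();
    if (errors?.length) {
        throw new Error(errors[0].message);
    }
    return data as T;
}

// Assumption: `online` is exposed as a top-level read-only field.
queryApi<{ online: boolean }>('{ online }').then(console.log).catch(console.error);
```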
|
||||
## Available Schemas
|
||||
|
||||
The API provides access to various aspects of your Unraid server:
|
||||
|
||||
### System Information
|
||||
|
||||
- Query system details including CPU, memory, and OS information
|
||||
- Monitor system status and health
|
||||
- Access baseboard and hardware information
|
||||
|
||||
### Array Management
|
||||
|
||||
- Query array status and configuration
|
||||
- Manage array operations (start/stop)
|
||||
- Monitor disk status and health
|
||||
- Perform parity checks
|
||||
|
||||
### Docker Management
|
||||
|
||||
- List and manage Docker containers
|
||||
- Monitor container status
|
||||
- Manage Docker networks
|
||||
|
||||
### Remote Access
|
||||
|
||||
- Configure and manage remote access settings
|
||||
- Handle SSO configuration
|
||||
- Manage allowed origins
|
||||
|
||||
### Example Queries
|
||||
|
||||
1. Check System Status:
|
||||
|
||||
```graphql
|
||||
query {
|
||||
info {
|
||||
os {
|
||||
platform
|
||||
distro
|
||||
release
|
||||
uptime
|
||||
}
|
||||
cpu {
|
||||
manufacturer
|
||||
brand
|
||||
cores
|
||||
threads
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
2. Monitor Array Status:
|
||||
|
||||
```graphql
|
||||
query {
|
||||
array {
|
||||
state
|
||||
capacity {
|
||||
disks {
|
||||
free
|
||||
used
|
||||
total
|
||||
}
|
||||
}
|
||||
disks {
|
||||
name
|
||||
size
|
||||
status
|
||||
temp
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
3. List Docker Containers:
|
||||
|
||||
```graphql
|
||||
query {
|
||||
dockerContainers {
|
||||
id
|
||||
names
|
||||
state
|
||||
status
|
||||
autoStart
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
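If you prefer a typed client over raw HTTP, here is a sketch running the system status query from example 1 with @apollo/client, which the API itself depends on. The URL and key are placeholders; the header name matches the authentication section above, everything else is ordinary Apollo boilerplate.

```ts
// apollo-example.ts — sketch using @apollo/client; URL and key are placeholders.
import { ApolloClient, HttpLink, InMemoryCache, gql } from '@apollo/client/core';

const client = new ApolloClient({
    link: new HttpLink({
        uri: 'http://YOUR_SERVER_IP/graphql',
        headers: { 'x-api-key': 'YOUR_API_KEY' },
    }),
    cache: new InMemoryCache(),
});

const SYSTEM_STATUS = gql`
    query {
        info {
            os { platform distro release uptime }
        }
    }
`;

// Prints the os block from example 1 above.
client.query({ query: SYSTEM_STATUS }).then(({ data }) => {
    console.log(data.info.os);
});
```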
## Schema Types

The API includes several core types:

### Base Types

- `Node`: Interface for objects with unique IDs - please see [Object Identification](https://graphql.org/learn/global-object-identification/)
- `JSON`: For complex JSON data
- `DateTime`: For timestamp values
- `Long`: For 64-bit integers

### Resource Types

- `Array`: Array and disk management
- `Docker`: Container and network management
- `Info`: System information
- `Config`: Server configuration
- `Connect`: Remote access settings

### Role-Based Access

Available roles:

- `admin`: Full access
- `connect`: Remote access features
- `guest`: Limited read access

## Best Practices

1. Use the Apollo Sandbox to explore the schema and test queries
2. Start with small queries and gradually add fields as needed
3. Monitor your query complexity to maintain performance
4. Use appropriate roles and permissions for your API keys
5. Keep your API keys secure and rotate them periodically

## Rate Limiting

The API implements rate limiting to prevent abuse. Ensure your applications handle rate limit responses appropriately.

## Error Handling

The API returns standard GraphQL errors in the following format:

```json
{
  "errors": [
    {
      "message": "Error description",
      "locations": [...],
      "path": [...]
    }
  ]
}
```
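Since the API both rate-limits and returns the error envelope above, client code should decide which failures are transient and which are fatal. A sketch using p-retry (a dependency of this repo) with exponential backoff; the retry policy itself is an assumption on my part, not something the API prescribes.

```ts
// resilient-query.ts — sketch; the retry policy is an assumption.
import pRetry, { AbortError } from 'p-retry';

interface GraphQLError { message: string; }

async function queryOnce(query: string): Promise<unknown> {
    const res = await fetch('http://YOUR_SERVER_IP/graphql', {
        method: 'POST',
        headers: { 'content-type': 'application/json', 'x-api-key': 'YOUR_API_KEY' },
        body: JSON.stringify({ query }),
    });
    if (res.status === 429) {
        throw new Error('rate limited'); // transient: let p-retry back off and retry
    }
    const { data, errors } = (await res.json()) as { data?: unknown; errors?: GraphQLError[] };
    if (errors?.length) {
        // GraphQL-level errors are usually not transient; abort instead of retrying.
        throw new AbortError(errors.map((e) => e.message).join('; '));
    }
    return data;
}

// Retry transient failures (e.g. rate limits) with exponential backoff.
pRetry(() => queryOnce('{ online }'), { retries: 5 }).then(console.log).catch(console.error);
```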
## Additional Resources

- Use the Apollo Sandbox's schema explorer to browse all available types and fields
- Check the documentation tab in Apollo Sandbox for detailed field descriptions
- Monitor the API's health using `unraid-api status`
- Generate reports using `unraid-api report` for troubleshooting

For more information about specific commands and configuration options, refer to the CLI documentation or run `unraid-api --help`.
@@ -1,37 +0,0 @@
# Unraid API

The Unraid API provides a GraphQL interface for programmatic interaction with your Unraid server. It enables automation, monitoring, and integration capabilities.

## Current Availability

The API is available through the Unraid Connect Plugin:

1. Install the Unraid Connect Plugin from Apps
2. [Configure the plugin](./how-to-use-the-api.md#enabling-the-graphql-sandbox)
3. Access API functionality through the [GraphQL Sandbox](./how-to-use-the-api.md#accessing-the-graphql-sandbox)

## Future Availability

The API will be integrated directly into the Unraid operating system in an upcoming OS release. This integration will:

- Make the API a core part of the Unraid system
- Remove the need for separate plugin installation
- Enable deeper system integration capabilities

## Documentation Sections

- [CLI Commands](./cli.md) - Reference for all available command-line interface commands
- [Using the Unraid API](./how-to-use-the-api.md) - Comprehensive guide on using the GraphQL API
- [Upcoming Features](./upcoming-features.md) - Roadmap of planned features and improvements

## Key Features

The API provides:

- GraphQL Interface: Modern, flexible API with strong typing
- Authentication: Secure access via API keys or session cookies
- Comprehensive Coverage: Access to system information, array management, and Docker operations
- Developer Tools: Built-in GraphQL sandbox for testing
- Role-Based Access: Granular permission control

For detailed usage instructions, see [CLI Commands](./cli.md).
@@ -1,71 +0,0 @@
# Upcoming Features

Note: This roadmap outlines planned features and improvements for the Unraid API. Features and timelines may change based on development priorities and community feedback.

## Core Infrastructure

| Feature | Status | Tag |
|---------|--------|-----|
| API Development Environment Improvements | Done | v4.0.0 |
| Include API in Unraid OS | Planned (Q1 2025) | - |
| Make API Open Source | Planned (Q1 2025) | - |
| Separate API from Connect Plugin | Planned (Q2 2025) | - |
| Developer Tools for Plugins | Planned (Q2 2025) | - |

## Security & Authentication

| Feature | Status | Tag |
|---------|--------|-----|
| Permissions System Rewrite | Done | v4.0.0 |
| User Interface Component Library | In Progress | - |

## User Interface Improvements

| Feature | Status | Tag |
|---------|--------|-----|
| New Settings Pages | Planned (Q2 2025) | - |
| Custom Theme Creator | Planned (Q2-Q3 2025) | - |
| New Connect Settings Interface | Planned (Q1 2025) | - |

## Array Management

| Feature | Status | Tag |
|---------|--------|-----|
| Array Status Monitoring | Done | v4.0.0 |
| Storage Pool Creation Interface | Planned (Q2 2025) | - |
| Storage Pool Status Interface | Planned (Q2 2025) | - |

## Docker Integration

| Feature | Status | Tag |
|---------|--------|-----|
| Docker Container Status Monitoring | Done | v4.0.0 |
| New Docker Status Interface Design | Planned (Q3 2025) | - |
| New Docker Status Interface | Planned (Q3 2025) | - |
| Docker Container Setup Interface | Planned (Q3 2025) | - |
| Docker Compose Support | Planned | - |

## Share Management

| Feature | Status | Tag |
|---------|--------|-----|
| Array/Cache Share Status Monitoring | Done | v4.0.0 |
| Storage Share Creation & Settings | Planned | - |
| Storage Share Management Interface | Planned | - |

## Plugin System

| Feature | Status | Tag |
|---------|--------|-----|
| New Plugins Interface | Planned (Q3 2025) | - |
| Plugin Management Interface | Planned | - |
| Plugin Development Tools | Planned | - |

## Notifications

| Feature | Status | Tag |
|---------|--------|-----|
| Notifications System | Done | v4.0.0 |
| Notifications Interface | Done | v4.0.0 |

Features marked as "Done" are available in current releases. The tag column shows the version where a feature was first introduced.
@@ -1,19 +1,18 @@
{
    "$schema": "https://json.schemastore.org/pm2-ecosystem",
    "apps": [
        {
            "name": "unraid-api",
            "script": "./dist/main.js",
            "script": "npm",
            "args": "start",
            "cwd": "/usr/local/unraid-api",
            "log": "/var/log/unraid-api/unraid-api.log",
            "exec_mode": "fork",
            "wait_ready": true,
            "listen_timeout": 15000,
            "max_restarts": 10,
            "min_uptime": 10000,
            "watch": false,
            "ignore_watch": ["node_modules", "src", ".env.*", "myservers.cfg"],
            "log_file": "/var/log/graphql-api.log",
            "kill_timeout": 10000
            "ignore_watch": [
                "node_modules",
                "src",
                ".env.*",
                "myservers.cfg"
            ]
        }
    ]
}
20 api/justfile
@@ -1,20 +0,0 @@
set fallback

default:
    @just --list --justfile {{justfile()}} --list-heading $'\nAPI project recipes:\n'
    @just list-commands

setup:
    pnpm install
    pnpm run container:build

# builds js files that can run on an unraid server
@build:
    pnpm run build

# deploys to an unraid server
@deploy:
    ./scripts/deploy-dev.sh

# build & deploy
bd: build deploy

27166 api/package-lock.json generated Normal file
File diff suppressed because it is too large
377 api/package.json
@@ -1,195 +1,186 @@
{
    "name": "@unraid/api",
    "version": "4.1.2",
    "main": "src/cli/index.ts",
    "type": "module",
    "corepack": { "enabled": true },
    "repository": "git@github.com:unraid/api.git",
    "author": "Lime Technology, Inc. <unraid.net>",
    "license": "UNLICENSED",
    "engines": { "pnpm": ">=8.0.0" },
    "scripts": {
        "// Main application commands": "",
        "start": "node dist/main.js",
        "dev": "vite",
        "command": "pnpm run build && clear && ./dist/cli.js",
        "// Build commands": "",
        "build": "vite build --mode=production",
        "postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js",
        "build:docker": "./scripts/dc.sh run --rm builder",
        "build-and-pack": "tsx ./scripts/build.ts",
        "// Code generation commands": "",
        "codegen": "MOTHERSHIP_GRAPHQL_LINK='https://staging.mothership.unraid.net/ws' graphql-codegen --config codegen.ts -r dotenv/config './.env.staging'",
        "codegen:watch": "DOTENV_CONFIG_PATH='./.env.staging' graphql-codegen --config codegen.ts --watch -r dotenv/config",
        "codegen:local": "NODE_TLS_REJECT_UNAUTHORIZED=0 MOTHERSHIP_GRAPHQL_LINK='https://mothership.localhost/ws' graphql-codegen --config codegen.ts --watch",
        "// Development and quality tools": "",
        "tsc": "tsc --noEmit",
        "lint": "eslint --config .eslintrc.ts src/",
        "lint:fix": "eslint --fix --config .eslintrc.ts src/",
        "release": "standard-version",
        "// Testing commands": "",
        "test": "NODE_ENV=test vitest run",
        "test:watch": "NODE_ENV=test vitest --ui",
        "coverage": "NODE_ENV=test vitest run --coverage",
        "// Container management commands": "",
        "container:build": "./scripts/dc.sh build dev",
        "container:start": "pnpm run container:stop && ./scripts/dc.sh run --rm --service-ports dev",
        "container:stop": "./scripts/dc.sh stop dev",
        "container:test": "./scripts/dc.sh run --rm builder pnpm run test",
        "container:enter": "./scripts/dc.sh exec dev /bin/bash"
    },
    "bin": { "unraid-api": "dist/cli.js" },
    "dependencies": {
        "@apollo/client": "^3.11.8", "@apollo/server": "^4.11.2", "@as-integrations/fastify": "^2.1.1",
        "@fastify/cookie": "^9.4.0", "@graphql-codegen/client-preset": "^4.5.0", "@graphql-tools/load-files": "^7.0.0",
        "@graphql-tools/merge": "^9.0.8", "@graphql-tools/schema": "^10.0.7", "@graphql-tools/utils": "^10.5.5",
        "@nestjs/apollo": "^12.2.1", "@nestjs/common": "^10.4.7", "@nestjs/core": "^10.4.7",
        "@nestjs/graphql": "^12.2.1", "@nestjs/passport": "^10.0.3", "@nestjs/platform-fastify": "^10.4.7",
        "@nestjs/schedule": "^4.1.1", "@nestjs/throttler": "^6.2.1", "@reduxjs/toolkit": "^2.3.0",
        "@runonflux/nat-upnp": "^1.0.2", "@types/diff": "^7.0.1", "@unraid/libvirt": "^1.1.3",
        "accesscontrol": "^2.2.1", "bycontract": "^2.0.11", "bytes": "^3.1.2",
        "cacheable-lookup": "^7.0.0", "camelcase-keys": "^9.1.3", "casbin": "^5.32.0",
        "catch-exit": "^1.2.2", "chokidar": "^4.0.1", "cli-table": "^0.3.11",
        "command-exists": "^1.2.9", "convert": "^5.5.1", "cookie": "^1.0.2",
        "cron": "3.5.0", "cross-fetch": "^4.0.0", "diff": "^7.0.0",
        "docker-event-emitter": "^0.3.0", "dockerode": "^3.3.5", "dotenv": "^16.4.5",
        "execa": "^9.5.1", "exit-hook": "^4.0.0", "filenamify": "^6.0.0",
        "fs-extra": "^11.2.0", "glob": "^11.0.1", "global-agent": "^3.0.0",
        "got": "^14.4.4", "graphql": "^16.9.0", "graphql-fields": "^2.0.3",
        "graphql-scalars": "^1.23.0", "graphql-subscriptions": "^2.0.0", "graphql-tag": "^2.12.6",
        "graphql-type-json": "^0.3.2", "graphql-type-uuid": "^0.2.0", "graphql-ws": "^5.16.0",
        "ini": "^4.1.2", "ip": "^2.0.1", "jose": "^5.9.6",
        "lodash-es": "^4.17.21", "multi-ini": "^2.3.2", "mustache": "^4.2.0",
        "nest-authz": "^2.11.0", "nest-commander": "^3.15.0", "nestjs-pino": "^4.1.0",
        "node-cache": "^5.1.2", "node-window-polyfill": "^1.0.2", "p-retry": "^6.2.0",
        "passport-custom": "^1.1.1", "passport-http-header-strategy": "^1.1.0", "path-type": "^6.0.0",
        "pino": "^9.5.0", "pino-http": "^10.3.0", "pino-pretty": "^11.3.0",
        "pm2": "^5.4.2", "reflect-metadata": "^0.1.14", "request": "^2.88.2",
        "semver": "^7.6.3", "strftime": "^0.10.3", "systeminformation": "^5.25.11",
        "uuid": "^11.0.2", "ws": "^8.18.0", "zod": "^3.23.8"
    },
    "devDependencies": {
        "@graphql-codegen/add": "^5.0.3", "@graphql-codegen/cli": "^5.0.3", "@graphql-codegen/fragment-matcher": "^5.0.2",
        "@graphql-codegen/import-types-preset": "^3.0.0", "@graphql-codegen/typed-document-node": "^5.0.11", "@graphql-codegen/typescript": "^4.1.1",
        "@graphql-codegen/typescript-operations": "^4.3.1", "@graphql-codegen/typescript-resolvers": "4.4.3", "@graphql-typed-document-node/core": "^3.2.0",
        "@ianvs/prettier-plugin-sort-imports": "^4.4.0", "@nestjs/testing": "^10.4.7", "@originjs/vite-plugin-commonjs": "^1.0.3",
        "@rollup/plugin-node-resolve": "^15.3.0", "@swc/core": "^1.10.1", "@types/async-exit-hook": "^2.0.2",
        "@types/bytes": "^3.1.4", "@types/cli-table": "^0.3.4", "@types/command-exists": "^1.2.3",
        "@types/cors": "^2.8.17", "@types/dockerode": "^3.3.31", "@types/graphql-fields": "^1.3.9",
        "@types/graphql-type-uuid": "^0.2.6", "@types/ini": "^4.1.1", "@types/ip": "^1.1.3",
        "@types/lodash": "^4.17.13", "@types/mustache": "^4.2.5", "@types/node": "^22.9.0",
        "@types/pify": "^5.0.4", "@types/semver": "^7.5.8", "@types/sendmail": "^1.4.7",
        "@types/stoppable": "^1.1.3", "@types/strftime": "^0.9.8", "@types/uuid": "^10.0.0",
        "@types/ws": "^8.5.13", "@types/wtfnode": "^0.7.3", "@vitest/coverage-v8": "^3.0.5",
        "@vitest/ui": "^3.0.5", "cz-conventional-changelog": "3.3.0", "eslint": "^9.14.0",
        "eslint-plugin-no-relative-import-paths": "^1.6.1", "eslint-plugin-prettier": "^5.2.3", "graphql-codegen-typescript-validation-schema": "^0.17.0",
        "jiti": "^2.4.0", "nodemon": "^3.1.7", "rollup-plugin-node-externals": "^7.1.3",
        "standard-version": "^9.5.0", "tsx": "^4.19.2", "typescript": "^5.6.3",
        "typescript-eslint": "^8.13.0", "unplugin-swc": "^1.5.1", "vite": "^5.4.14",
        "vite-plugin-node": "^4.0.0", "vite-tsconfig-paths": "^5.1.0", "vitest": "^3.0.5",
        "zx": "^8.3.2"
    },
    "overrides": { "eslint": { "jiti": "2" } },
    "private": true,
    "packageManager": "pnpm@10.4.1"
}
{
    "name": "@unraid/api",
    "version": "3.11.0",
    "main": "src/cli/index.ts",
    "type": "module",
    "repository": "git@github.com:unraid/api.git",
    "author": "Lime Technology, Inc. <unraid.net>",
    "license": "UNLICENSED",
    "scripts": {
        "start": "node dist/main.js",
        "build:docker": "./scripts/dc.sh run --rm builder",
        "build": "vite build --mode=production",
        "postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js",
        "build-and-pack": "./scripts/build.mjs",
        "codegen": "MOTHERSHIP_GRAPHQL_LINK='https://staging.mothership.unraid.net/ws' graphql-codegen --config codegen.yml -r dotenv/config './.env.staging'",
        "codegen:watch": "DOTENV_CONFIG_PATH='./.env.staging' graphql-codegen --config codegen.yml --watch -r dotenv/config",
        "codegen:local": "NODE_TLS_REJECT_UNAUTHORIZED=0 MOTHERSHIP_GRAPHQL_LINK='https://mothership.localhost/ws' graphql-codegen-esm --config codegen.yml --watch",
        "tsc": "tsc --noEmit",
        "lint": "eslint --flag unstable_ts_config --config .eslintrc.ts src/",
        "lint:fix": "eslint --flag unstable_ts_config --fix --config .eslintrc.ts src/",
        "test:watch": "vitest --pool=forks",
        "test": "vitest run --pool=forks",
        "coverage": "vitest run --pool=forks --coverage",
        "release": "standard-version",
        "dev": "vite",
        "container:build": "./scripts/dc.sh build dev",
        "container:start": "./scripts/dc.sh run --rm --service-ports dev",
        "container:test": "./scripts/dc.sh run --rm builder npm run test",
        "container:enter": "./scripts/dc.sh exec dev /bin/bash"
    },
    "files": [
        ".env.staging",
        ".env.production",
        "ecosystem.config.json",
        "README.md",
        "src",
        "node_modules/"
    ],
    "bin": { "unraid-api": "dist/cli.js" },
    "dependencies": {
        "@apollo/client": "^3.11.8", "@apollo/server": "^4.11.2", "@as-integrations/fastify": "^2.1.1",
        "@fastify/cookie": "^9.4.0", "@graphql-codegen/client-preset": "^4.5.0", "@graphql-tools/load-files": "^7.0.0",
        "@graphql-tools/merge": "^9.0.8", "@graphql-tools/schema": "^10.0.7", "@graphql-tools/utils": "^10.5.5",
        "@nestjs/apollo": "^12.2.1", "@nestjs/core": "^10.4.7", "@nestjs/graphql": "^12.2.1",
        "@nestjs/passport": "^10.0.3", "@nestjs/platform-fastify": "^10.4.7", "@nestjs/schedule": "^4.1.1",
        "@reduxjs/toolkit": "^2.3.0", "@reflet/cron": "^1.3.1", "@runonflux/nat-upnp": "^1.0.2",
        "accesscontrol": "^2.2.1", "btoa": "^1.2.1", "bycontract": "^2.0.11",
        "bytes": "^3.1.2", "cacheable-lookup": "^7.0.0", "camelcase-keys": "^9.1.3",
        "catch-exit": "^1.2.2", "chokidar": "^4.0.1", "cli-table": "^0.3.11",
        "command-exists": "^1.2.9", "convert": "^5.5.1", "cross-fetch": "^4.0.0",
        "docker-event-emitter": "^0.3.0", "dockerode": "^3.3.5", "dotenv": "^16.4.5",
        "execa": "^9.5.1", "exit-hook": "^4.0.0", "express": "^4.21.1",
        "filenamify": "^6.0.0", "fs-extra": "^11.2.0", "global-agent": "^3.0.0",
        "got": "^14.4.4", "graphql": "^16.9.0", "graphql-fields": "^2.0.3",
        "graphql-scalars": "^1.23.0", "graphql-subscriptions": "^2.0.0", "graphql-tag": "^2.12.6",
        "graphql-type-json": "^0.3.2", "graphql-type-uuid": "^0.2.0", "graphql-ws": "^5.16.0",
        "ini": "^4.1.2", "ip": "^2.0.1", "ip-regex": "^5.0.0",
        "jose": "^5.9.6", "lodash-es": "^4.17.21", "multi-ini": "^2.3.2",
        "mustache": "^4.2.0", "nest-access-control": "^3.1.0", "nest-authz": "^2.11.0",
        "nestjs-pino": "^4.1.0", "node-cache": "^5.1.2", "node-window-polyfill": "^1.0.2",
        "openid-client": "^6.1.3", "p-retry": "^6.2.0", "passport-custom": "^1.1.1",
        "passport-http-header-strategy": "^1.1.0", "path-type": "^6.0.0", "pidusage": "^3.0.2",
        "pino": "^9.5.0", "pino-http": "^10.3.0", "pino-pretty": "^11.3.0",
        "pm2": "^5.4.2", "reflect-metadata": "^0.1.14", "request": "^2.88.2",
        "semver": "^7.6.3", "stoppable": "^1.1.0", "systeminformation": "^5.23.5",
        "ts-command-line-args": "^2.5.1", "uuid": "^11.0.2", "ws": "^8.18.0",
        "xhr2": "^0.2.1", "zod": "^3.23.8"
    },
    "devDependencies": {
        "@graphql-codegen/add": "^5.0.3", "@graphql-codegen/cli": "^5.0.3", "@graphql-codegen/fragment-matcher": "^5.0.2",
        "@graphql-codegen/import-types-preset": "^3.0.0", "@graphql-codegen/typed-document-node": "^5.0.11", "@graphql-codegen/typescript": "^4.1.1",
        "@graphql-codegen/typescript-operations": "^4.3.1", "@graphql-codegen/typescript-resolvers": "4.4.0", "@graphql-typed-document-node/core": "^3.2.0",
        "@ianvs/prettier-plugin-sort-imports": "^4.4.0", "@nestjs/testing": "^10.4.7", "@originjs/vite-plugin-commonjs": "^1.0.3",
        "@rollup/plugin-node-resolve": "^15.3.0", "@types/async-exit-hook": "^2.0.2", "@types/btoa": "^1.2.5",
        "@types/bytes": "^3.1.4", "@types/cli-table": "^0.3.4", "@types/command-exists": "^1.2.3",
        "@types/cors": "^2.8.17", "@types/dockerode": "^3.3.31", "@types/express": "^5.0.0",
        "@types/graphql-fields": "^1.3.9", "@types/graphql-type-uuid": "^0.2.6", "@types/ini": "^4.1.1",
        "@types/ip": "^1.1.3", "@types/lodash": "^4.17.13", "@types/mustache": "^4.2.5",
        "@types/node": "^22.9.0", "@types/pidusage": "^2.0.5", "@types/pify": "^5.0.4",
        "@types/semver": "^7.5.8", "@types/sendmail": "^1.4.7", "@types/stoppable": "^1.1.3",
        "@types/uuid": "^10.0.0", "@types/ws": "^8.5.13", "@types/wtfnode": "^0.7.3",
        "@vitest/coverage-v8": "^2.1.4", "@vitest/ui": "^2.1.4", "cz-conventional-changelog": "3.3.0",
        "eslint": "^9.14.0", "graphql-codegen-typescript-validation-schema": "^0.16.0", "jiti": "^2.4.0",
        "rollup-plugin-node-externals": "^7.1.3", "standard-version": "^9.5.0", "typescript": "^5.6.3",
        "typescript-eslint": "^8.13.0", "vite": "^5.4.10", "vite-plugin-node": "^4.0.0",
        "vite-plugin-static-copy": "^2.0.0", "vite-tsconfig-paths": "^5.1.0", "vitest": "^2.1.4",
        "zx": "^8.2.0"
    },
    "optionalDependencies": { "@vmngr/libvirt": "github:unraid/libvirt" },
    "overrides": { "eslint": { "jiti": "2" } }
}
84 api/scripts/build.mjs Executable file
@@ -0,0 +1,84 @@
#!/usr/bin/env zx
import { exit } from 'process';
import { cd, $ } from 'zx';

import { getDeploymentVersion } from './get-deployment-version.mjs';

try {
    // Enable colours in output
    process.env.FORCE_COLOR = '1';

    // Ensure we have the correct working directory
    process.env.WORKDIR ??= process.env.PWD;
    cd(process.env.WORKDIR);

    // Create deployment directories - ignore if they already exist
    await $`mkdir -p ./deploy/release`;
    await $`mkdir -p ./deploy/pre-pack`;

    await $`rm -rf ./deploy/release/*`;
    await $`rm -rf ./deploy/pre-pack/*`;

    // Build generated types
    await $`npm run codegen`;

    await $`npm run build`;
    // Copy app files to plugin directory
    await $`cp -r ./src/ ./deploy/pre-pack/src/`;
    await $`cp -r ./dist/ ./deploy/pre-pack/dist/`;

    // Copy environment to deployment directory
    const files = [
        '.env.production',
        '.env.staging',
        'tsconfig.json',
        'codegen.yml',
        'ecosystem.config.json',
    ];

    for (const file of files) {
        await $`cp ./${file} ./deploy/pre-pack/${file}`;
    }

    // Get package details
    const { name, version, ...rest } = await import('../package.json', {
        assert: { type: 'json' },
    }).then((pkg) => pkg.default);

    const deploymentVersion = getDeploymentVersion(process.env, version);

    // Create deployment package.json
    await $`echo ${JSON.stringify({
        ...rest,
        name,
        version: deploymentVersion,
    })} > ./deploy/pre-pack/package.json`;

    // Create final tgz
    await $`cp ./README.md ./deploy/pre-pack/`;

    await $`cp -r ./node_modules ./deploy/pre-pack/node_modules`;
    // Install production dependencies
    cd('./deploy/pre-pack');

    await $`npm prune --omit=dev`;
    await $`npm install --omit=dev`;

    // Now we'll pack everything in the pre-pack directory
    await $`tar -czf ../unraid-api-${deploymentVersion}.tgz .`;

    // Move unraid-api.tgz to release directory
    await $`mv ../unraid-api-${deploymentVersion}.tgz ../release`;
} catch (error) {
    // Error with a command
    if (Object.keys(error).includes('stderr')) {
        console.log(`Failed building package. Exit code: ${error.exitCode}`);
        console.log(`Error: ${error.stderr}`);
    } else {
        // Normal js error
        console.log('Failed building package.');
        console.log(`Error: ${error.message}`);
    }

    exit(error.exitCode);
}
@@ -1,67 +0,0 @@
#!/usr/bin/env zx
import { mkdir, readFile, rm, writeFile } from 'fs/promises';
import { exit } from 'process';

import { $, cd } from 'zx';

import { getDeploymentVersion } from './get-deployment-version.js';

try {
    // Create release and pack directories
    // Clean existing deploy folder
    await rm('./deploy', { recursive: true }).catch(() => {});
    await mkdir('./deploy/release', { recursive: true });
    await mkdir('./deploy/pack', { recursive: true });

    // Build generated types
    await $`pnpm run codegen`;

    await $`pnpm run build`;
    // Copy app files to plugin directory

    // Get package details
    const packageJson = await readFile('./package.json', 'utf-8');
    const parsedPackageJson = JSON.parse(packageJson);

    const deploymentVersion = await getDeploymentVersion(process.env, parsedPackageJson.version);

    // Update the package.json version to the deployment version
    parsedPackageJson.version = deploymentVersion;

    // Create a temporary directory for packaging
    await mkdir('./deploy/pack/', { recursive: true });

    await writeFile('./deploy/pack/package.json', JSON.stringify(parsedPackageJson, null, 4));
    // Copy necessary files to the pack directory
    await $`cp -r dist README.md .env.* ecosystem.config.json ./deploy/pack/`;

    // Change to the pack directory and install dependencies
    cd('./deploy/pack');

    console.log('Installing production dependencies...');
    $.verbose = true;
    await $`pnpm install --prod --ignore-workspace --node-linker hoisted`;

    // chmod the cli
    await $`chmod +x ./dist/cli.js`;
    await $`chmod +x ./dist/main.js`;

    // Create the tarball
    await $`tar -czf ../release/unraid-api.tgz ./`;

    // Clean up
    cd('..');
} catch (error) {
    // Error with a command
    if (Object.keys(error).includes('stderr')) {
        console.log(`Failed building package. Exit code: ${error.exitCode}`);
        console.log(`Error: ${error.stderr}`);
    } else {
        // Normal js error
        console.log('Failed building package.');
        console.log(`Error: ${error.message}`);
    }

    exit(error.exitCode);
}
@@ -7,7 +7,7 @@
# By default, this is my-session

sessions_dir=/var/lib/php
default_session_name=mock-user-session
default_session_name=my-session

if [ "$1" = "--help" ]; then
    echo "This script creates a mock session on a server."
@@ -15,7 +15,7 @@ if [ "$1" = "--help" ]; then
    echo "Usage: $0 [options]"
    echo ""
    echo "Options:"
    echo "  [name]   Name of the session to create (default: mock-user-session)"
    echo "  [name]   Name of the session to create (default: my-session)"
    echo "  --help   Display this help message and exit"
    echo ""
    echo "Example: $0 a-session-name"

@@ -23,18 +23,13 @@ fi
echo "$server_name" > "$state_file"

# Source directory path
source_directory="./dist"
source_directory="./src"

if [ ! -d "$source_directory" ]; then
    echo "The dist directory does not exist. Attempting build..."
    npm run build
    if [ $? -ne 0 ]; then
        echo "Build failed!"
        exit 1
    fi
    echo "The src directory does not exist."
    exit 1
fi

# Change ownership on copy
# Replace the value inside the rsync command with the user's input
rsync_command="rsync -avz -e ssh $source_directory root@${server_name}:/usr/local/unraid-api"

@@ -45,11 +40,8 @@ echo "$rsync_command"
eval "$rsync_command"
exit_code=$?

# Chown the directory
ssh root@"${server_name}" "chown -R root:root /usr/local/unraid-api"

# Run unraid-api restart on remote host
ssh root@"${server_name}" "INTROSPECTION=true LOG_LEVEL=trace unraid-api restart"
ssh root@"${server_name}" "unraid-api restart"

# Play built-in sound based on the operating system
if [[ "$OSTYPE" == "darwin"* ]]; then
@@ -64,5 +56,4 @@ elif [[ "$OSTYPE" == "msys" || "$OSTYPE" == "win32" ]]; then
fi

# Exit with the rsync command's exit code
exit $exit_code

exit $exit_code
@@ -1,16 +1,15 @@
import { execa } from 'execa';
import { execSync } from 'child_process';

const runCommand = async (command: string, args: string[]) => {
const runCommand = (command) => {
    try {
        const { stdout } = await execa(command, args);
        return stdout.trim();
        return execSync(command, { stdio: 'pipe' }).toString().trim();
    } catch (error) {
        console.log('Failed to execute command:', command, args.join(' '), error.message);
        return undefined;
        console.log('Failed to get value from tag command: ', command, error.message);
        return;
    }
};

export const getDeploymentVersion = async (env = process.env, packageVersion: string) => {
export const getDeploymentVersion = (env = process.env, packageVersion) => {
    if (env.API_VERSION) {
        console.log(`Using env var for version: ${env.API_VERSION}`);
        return env.API_VERSION;
@@ -18,11 +17,9 @@ export const getDeploymentVersion = async (env = process.env, packageVersion: st
        console.log(`Using env vars for git tags: ${env.GIT_SHA} ${env.IS_TAGGED}`);
        return env.IS_TAGGED ? packageVersion : `${packageVersion}+${env.GIT_SHA}`;
    } else {
        const gitShortSHA = await runCommand('git', ['rev-parse', '--short', 'HEAD']);
        const isCommitTagged = await runCommand('git', ['describe', '--tags', '--abbrev=0', '--exact-match']) !== undefined;

        const gitShortSHA = runCommand('git rev-parse --short HEAD');
        const isCommitTagged = runCommand('git describe --tags --abbrev=0 --exact-match') !== undefined;
        console.log('gitShortSHA', gitShortSHA, 'isCommitTagged', isCommitTagged);

        if (!gitShortSHA) {
            console.error('Failed to get git short SHA');
            process.exit(1);
@@ -1,44 +1,48 @@
import { getAllowedOrigins } from '@app/common/allowed-origins';
import { store } from '@app/store/index';
import { loadConfigFile } from '@app/store/modules/config';
import { loadStateFiles } from '@app/store/modules/emhttp';

import 'reflect-metadata';

import { expect, test } from 'vitest';

// Preloading imports for faster tests
import '@app/common/allowed-origins';
import '@app/store/modules/emhttp';
import '@app/store';

test('Returns allowed origins', async () => {
    const { store } = await import('@app/store');
    const { loadStateFiles } = await import('@app/store/modules/emhttp');
    const { getAllowedOrigins } = await import('@app/common/allowed-origins');
    const { loadConfigFile } = await import('@app/store/modules/config');

    // Load state files into store
    await store.dispatch(loadStateFiles());
    await store.dispatch(loadConfigFile());

    // Get allowed origins
    expect(getAllowedOrigins()).toMatchInlineSnapshot(`
      [
        "/var/run/unraid-notifications.sock",
        "/var/run/unraid-php.sock",
        "/var/run/unraid-cli.sock",
        "http://localhost:8080",
        "https://localhost:4443",
        "https://tower.local:4443",
        "https://192.168.1.150:4443",
        "https://tower:4443",
        "https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443",
        "https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443",
        "https://10-252-0-1.hash.myunraid.net:4443",
        "https://10-252-1-1.hash.myunraid.net:4443",
        "https://10-253-3-1.hash.myunraid.net:4443",
        "https://10-253-4-1.hash.myunraid.net:4443",
        "https://10-253-5-1.hash.myunraid.net:4443",
        "https://10-100-0-1.hash.myunraid.net:4443",
        "https://10-100-0-2.hash.myunraid.net:4443",
        "https://10-123-1-2.hash.myunraid.net:4443",
        "https://221-123-121-112.hash.myunraid.net:4443",
        "https://google.com",
        "https://test.com",
        "https://connect.myunraid.net",
        "https://connect-staging.myunraid.net",
        "https://dev-my.myunraid.net:4000",
      ]
    `);
    [
      "/var/run/unraid-notifications.sock",
      "/var/run/unraid-php.sock",
      "/var/run/unraid-cli.sock",
      "http://localhost:8080",
      "https://localhost:4443",
      "https://tower.local:4443",
      "https://192.168.1.150:4443",
      "https://tower:4443",
      "https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443",
      "https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443",
      "https://10-252-0-1.hash.myunraid.net:4443",
      "https://10-252-1-1.hash.myunraid.net:4443",
      "https://10-253-3-1.hash.myunraid.net:4443",
      "https://10-253-4-1.hash.myunraid.net:4443",
      "https://10-253-5-1.hash.myunraid.net:4443",
      "https://10-100-0-1.hash.myunraid.net:4443",
      "https://10-100-0-2.hash.myunraid.net:4443",
      "https://10-123-1-2.hash.myunraid.net:4443",
      "https://221-123-121-112.hash.myunraid.net:4443",
      "https://google.com",
      "https://test.com",
      "https://connect.myunraid.net",
      "https://connect-staging.myunraid.net",
      "https://dev-my.myunraid.net:4000",
    ]
    `);
});
457 api/src/__test__/core/__snapshots__/permissions.test.ts.snap Normal file
@@ -0,0 +1,457 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html

exports[`Returns default permissions 1`] = `
RolesBuilder {
  "_grants": {
    "admin": {
      "$extend": ["guest"],
      "apikey": { "read:any": ["*"] },
      "array": { "read:any": ["*"] },
      "cloud": { "read:own": ["*"] },
      "config": { "read:any": ["*"], "update:own": ["*"] },
      "connect": { "read:own": ["*"], "update:own": ["*"] },
      "cpu": { "read:any": ["*"] },
      "crash-reporting-enabled": { "read:any": ["*"] },
      "customizations": { "read:any": ["*"] },
      "device": { "read:any": ["*"] },
      "device/unassigned": { "read:any": ["*"] },
      "disk": { "read:any": ["*"] },
      "disk/settings": { "read:any": ["*"] },
      "display": { "read:any": ["*"] },
      "docker/container": { "read:any": ["*"] },
      "docker/network": { "read:any": ["*"] },
      "flash": { "read:any": ["*"] },
      "info": { "read:any": ["*"] },
      "license-key": { "read:any": ["*"] },
      "logs": { "read:any": ["*"] },
      "machine-id": { "read:any": ["*"] },
      "memory": { "read:any": ["*"] },
      "notifications": { "create:any": ["*"], "read:any": ["*"] },
      "online": { "read:any": ["*"] },
      "os": { "read:any": ["*"] },
      "owner": { "read:any": ["*"] },
      "parity-history": { "read:any": ["*"] },
      "permission": { "read:any": ["*"] },
      "registration": { "read:any": ["*"] },
      "servers": { "read:any": ["*"] },
      "service": { "read:any": ["*"] },
      "service/emhttpd": { "read:any": ["*"] },
      "service/unraid-api": { "read:any": ["*"] },
      "services": { "read:any": ["*"] },
      "share": { "read:any": ["*"] },
      "software-versions": { "read:any": ["*"] },
      "unraid-version": { "read:any": ["*"] },
      "uptime": { "read:any": ["*"] },
      "user": { "read:any": ["*"] },
      "vars": { "read:any": ["*"] },
      "vms": { "read:any": ["*"] },
      "vms/domain": { "read:any": ["*"] },
      "vms/network": { "read:any": ["*"] },
    },
    "guest": {
      "me": { "read:any": ["*"] },
      "welcome": { "read:any": ["*"] },
    },
    "my_servers": {
      "$extend": ["guest"],
      "array": { "read:any": ["*"] },
      "config": { "read:any": ["*"] },
      "connect": { "read:any": ["*"] },
      "connect/dynamic-remote-access": { "read:any": ["*"], "update:own": ["*"] },
      "customizations": { "read:any": ["*"] },
      "dashboard": { "read:any": ["*"] },
      "display": { "read:any": ["*"] },
      "docker": { "read:any": ["*"] },
      "docker/container": { "read:any": ["*"] },
      "info": { "read:any": ["*"] },
      "logs": { "read:any": ["*"] },
      "network": { "read:any": ["*"] },
      "notifications": { "read:any": ["*"] },
      "services": { "read:any": ["*"] },
      "unraid-version": { "read:any": ["*"] },
      "vars": { "read:any": ["*"] },
      "vms": { "read:any": ["*"] },
      "vms/domain": { "read:any": ["*"] },
    },
    "notifier": {
      "$extend": ["guest"],
      "notifications": { "create:own": ["*"] },
    },
    "upc": {
      "$extend": ["guest"],
      "apikey": { "read:own": ["*"] },
      "cloud": { "read:own": ["*"] },
      "config": { "read:any": ["*"], "update:own": ["*"] },
      "connect": { "read:own": ["*"], "update:own": ["*"] },
      "crash-reporting-enabled": { "read:any": ["*"] },
      "customizations": { "read:any": ["*"] },
      "disk": { "read:any": ["*"] },
      "display": { "read:any": ["*"] },
      "flash": { "read:any": ["*"] },
      "info": { "read:any": ["*"] },
      "logs": { "read:any": ["*"] },
      "notifications": { "read:any": ["*"], "update:any": ["*"] },
      "os": { "read:any": ["*"] },
      "owner": { "read:any": ["*"] },
      "permission": { "read:any": ["*"] },
      "registration": { "read:any": ["*"] },
      "servers": { "read:any": ["*"] },
      "vars": { "read:any": ["*"] },
    },
  },
  "_isLocked": false,
}
`;
@@ -1,209 +1,212 @@
import { expect, test, vi } from 'vitest';

import { getArrayData } from '@app/core/modules/array/get-array-data';
import { store } from '@app/store';
import { loadConfigFile } from '@app/store/modules/config';
import { loadStateFiles } from '@app/store/modules/emhttp';
import { test, expect, vi } from 'vitest';

vi.mock('@app/core/pubsub', () => ({
pubsub: { publish: vi.fn() },
}));

test('Creates an array event', async () => {
const { getArrayData } = await import(
'@app/core/modules/array/get-array-data'
);
const { store } = await import('@app/store');
const { loadStateFiles } = await import('@app/store/modules/emhttp');
const { loadConfigFile } = await import('@app/store/modules/config');
// Load state files into store
await store.dispatch(loadStateFiles());

await store.dispatch(loadConfigFile());

const arrayEvent = getArrayData(store.getState);
expect(arrayEvent).toMatchObject({
boot: {
comment: 'Unraid OS boot device',
critical: null,
device: 'sda',
exportable: true,
format: 'unknown',
fsFree: 3191407,
fsSize: 4042732,
fsType: 'vfat',
fsUsed: 851325,
id: 'Cruzer',
idx: 32,
name: 'flash',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 3956700,
status: 'DISK_OK',
temp: null,
transport: 'usb',
type: 'Flash',
warning: null,
expect(arrayEvent).toMatchObject(
{
"boot": {
"comment": "Unraid OS boot device",
"critical": null,
"device": "sda",
"exportable": true,
"format": "unknown",
"fsFree": 3191407,
"fsSize": 4042732,
"fsType": "vfat",
"fsUsed": 851325,
"id": "Cruzer",
"idx": 32,
"name": "flash",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 3956700,
"status": "DISK_OK",
"temp": null,
"transport": "usb",
"type": "Flash",
"warning": null,
},
caches: [
{
comment: '',
critical: null,
device: 'sdi',
exportable: false,
format: 'MBR: 4KiB-aligned',
fsFree: 111810683,
fsSize: 250059317,
fsType: 'btrfs',
fsUsed: 137273827,
id: 'Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z',
idx: 30,
name: 'cache',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: false,
size: 244198552,
status: 'DISK_OK',
temp: 22,
transport: 'ata',
type: 'Cache',
warning: null,
},
{
comment: null,
critical: null,
device: 'nvme0n1',
exportable: false,
format: 'MBR: 4KiB-aligned',
fsFree: null,
fsSize: null,
fsType: null,
fsUsed: null,
id: 'KINGSTON_SA2000M8250G_50026B7282669D9E',
idx: 31,
name: 'cache2',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: false,
size: 244198552,
status: 'DISK_OK',
temp: 27,
transport: 'nvme',
type: 'Cache',
warning: null,
},
"caches": [
{
"comment": "",
"critical": null,
"device": "sdi",
"exportable": false,
"format": "MBR: 4KiB-aligned",
"fsFree": 111810683,
"fsSize": 250059317,
"fsType": "btrfs",
"fsUsed": 137273827,
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
"idx": 30,
"name": "cache",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": false,
"size": 244198552,
"status": "DISK_OK",
"temp": 22,
"transport": "ata",
"type": "Cache",
"warning": null,
},
{
"comment": null,
"critical": null,
"device": "nvme0n1",
"exportable": false,
"format": "MBR: 4KiB-aligned",
"fsFree": null,
"fsSize": null,
"fsType": null,
"fsUsed": null,
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
"idx": 31,
"name": "cache2",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": false,
"size": 244198552,
"status": "DISK_OK",
"temp": 27,
"transport": "nvme",
"type": "Cache",
"warning": null,
},
],
capacity: {
disks: {
free: '27',
total: '30',
used: '3',
},
kilobytes: {
free: '19495825571',
total: '41994745901',
used: '22498920330',
},
"capacity": {
"disks": {
"free": "27",
"total": "30",
"used": "3",
},
"kilobytes": {
"free": "19495825571",
"total": "41994745901",
"used": "22498920330",
},
},
disks: [
{
comment: 'Seagate Exos',
critical: 75,
device: 'sdf',
exportable: false,
format: 'GPT: 4KiB-aligned',
fsFree: 13882739732,
fsSize: 17998742753,
fsType: 'xfs',
fsUsed: 4116003021,
id: 'ST18000NM000J-2TV103_ZR5B1W9X',
idx: 1,
name: 'disk1',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 17578328012,
status: 'DISK_OK',
temp: 30,
transport: 'ata',
type: 'Data',
warning: 50,
},
{
comment: '',
critical: null,
device: 'sdj',
exportable: false,
format: 'GPT: 4KiB-aligned',
fsFree: 93140746,
fsSize: 11998001574,
fsType: 'xfs',
fsUsed: 11904860828,
id: 'WDC_WD120EDAZ-11F3RA0_5PJRD45C',
idx: 2,
name: 'disk2',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 11718885324,
status: 'DISK_OK',
temp: 30,
transport: 'ata',
type: 'Data',
warning: null,
},
{
comment: '',
critical: null,
device: 'sde',
exportable: false,
format: 'GPT: 4KiB-aligned',
fsFree: 5519945093,
fsSize: 11998001574,
fsType: 'xfs',
fsUsed: 6478056481,
id: 'WDC_WD120EMAZ-11BLFA0_5PH8BTYD',
idx: 3,
name: 'disk3',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 11718885324,
status: 'DISK_OK',
temp: 30,
transport: 'ata',
type: 'Data',
warning: null,
},
"disks": [
{
"comment": "Seagate Exos",
"critical": 75,
"device": "sdf",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": 13882739732,
"fsSize": 17998742753,
"fsType": "xfs",
"fsUsed": 4116003021,
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
"idx": 1,
"name": "disk1",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 17578328012,
"status": "DISK_OK",
"temp": 30,
"transport": "ata",
"type": "Data",
"warning": 50,
},
{
"comment": "",
"critical": null,
"device": "sdj",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": 93140746,
"fsSize": 11998001574,
"fsType": "xfs",
"fsUsed": 11904860828,
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
"idx": 2,
"name": "disk2",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 11718885324,
"status": "DISK_OK",
"temp": 30,
"transport": "ata",
"type": "Data",
"warning": null,
},
{
"comment": "",
"critical": null,
"device": "sde",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": 5519945093,
"fsSize": 11998001574,
"fsType": "xfs",
"fsUsed": 6478056481,
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
"idx": 3,
"name": "disk3",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 11718885324,
"status": "DISK_OK",
"temp": 30,
"transport": "ata",
"type": "Data",
"warning": null,
},
],
id: expect.any(String),
parities: [
{
comment: null,
critical: null,
device: 'sdh',
exportable: false,
format: 'GPT: 4KiB-aligned',
fsFree: null,
fsSize: null,
fsType: null,
fsUsed: null,
id: 'ST18000NM000J-2TV103_ZR585CPY',
idx: 0,
name: 'parity',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 17578328012,
status: 'DISK_OK',
temp: 25,
transport: 'ata',
type: 'Parity',
warning: null,
},
"id": expect.any(String),
"parities": [
{
"comment": null,
"critical": null,
"device": "sdh",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": null,
"fsSize": null,
"fsType": null,
"fsUsed": null,
"id": "ST18000NM000J-2TV103_ZR585CPY",
"idx": 0,
"name": "parity",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 17578328012,
"status": "DISK_OK",
"temp": 25,
"transport": "ata",
"type": "Parity",
"warning": null,
},
],
state: 'STOPPED',
});
"state": "STOPPED",
}
);
});

@@ -1,22 +0,0 @@
import { expect, test, vi } from 'vitest';

import { ConsoleNotifier } from '@app/core/notifiers/console';

vi.mock('@app/core/log', () => ({
logger: {
info: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
},
graphqlLogger: {
info: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
},
}));

test('Creates a console notifier', () => {
const notifier = new ConsoleNotifier();
expect(notifier.level).toBe('info');
expect(notifier.template).toBe('{{{ data }}}');
});
@@ -1,24 +0,0 @@
import { expect, test, vi } from 'vitest';

import { UnraidLocalNotifier } from '@app/core/notifiers/unraid-local';

vi.mock('@app/core/log', () => ({
logger: {
info: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
},
graphqlLogger: {
info: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
},
}));

test('Creates an email notifier', () => {
const notifier = new UnraidLocalNotifier({ level: 'info' });
expect(notifier.level).toBe('normal');
expect(notifier.template).toBe('{{ message }}');
const rendered = notifier.render({ message: 'Remote access started' });
expect(rendered).toEqual('Remote access started');
});
8
api/src/__test__/core/permissions.test.ts
Normal file
@@ -0,0 +1,8 @@
import 'reflect-metadata';

import { expect, test } from 'vitest';
import { setupPermissions } from '@app/core/permissions';

test('Returns default permissions', () => {
expect(setupPermissions()).toMatchSnapshot();
});
@@ -1,23 +0,0 @@
import { expect, test, vi } from 'vitest';

import type { SliceState } from '@app/store/modules/emhttp';
import { getters } from '@app/store';

test('Returns true if the array is started', async () => {
vi.spyOn(getters, 'emhttp').mockImplementation(
() => ({ var: { mdState: 'STARTED' } }) as unknown as SliceState
);

const { arrayIsRunning } = await import('@app/core/utils/array/array-is-running');
expect(arrayIsRunning()).toBe(true);
vi.spyOn(getters, 'emhttp').mockReset();
});

test('Returns false if the array is stopped', async () => {
vi.spyOn(getters, 'emhttp').mockImplementation(
() => ({ var: { mdState: 'Stopped' } }) as unknown as SliceState
);
const { arrayIsRunning } = await import('@app/core/utils/array/array-is-running');
expect(arrayIsRunning()).toBe(false);
vi.spyOn(getters, 'emhttp').mockReset();
});
@@ -1,123 +1,126 @@
import 'reflect-metadata';

import { cloneDeep } from 'lodash-es';
import { expect, test } from 'vitest';

import { test, expect } from 'vitest';
import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer';
import { initialState } from '@app/store/modules/config';
import { cloneDeep } from 'lodash-es';

test('it creates a FLASH config with NO OPTIONAL values', () => {
const basicConfig = initialState;
const config = getWriteableConfig(basicConfig, 'flash');
expect(config).toMatchInlineSnapshot(`
{
"api": {
"extraOrigins": "",
"version": "",
},
"local": {
"sandbox": "no",
},
"remote": {
"accesstoken": "",
"apikey": "",
"avatar": "",
"dynamicRemoteAccessType": "DISABLED",
"email": "",
"idtoken": "",
"localApiKey": "",
"refreshtoken": "",
"regWizTime": "",
"ssoSubIds": "",
"upnpEnabled": "",
"username": "",
"wanaccess": "",
"wanport": "",
},
}
`);
{
"api": {
"extraOrigins": "",
"version": "",
},
"local": {},
"notifier": {
"apikey": "",
},
"remote": {
"accesstoken": "",
"apikey": "",
"avatar": "",
"dynamicRemoteAccessType": "DISABLED",
"email": "",
"idtoken": "",
"refreshtoken": "",
"regWizTime": "",
"username": "",
"wanaccess": "",
"wanport": "",
},
"upc": {
"apikey": "",
},
}
`);
});

test('it creates a MEMORY config with NO OPTIONAL values', () => {
const basicConfig = initialState;
const config = getWriteableConfig(basicConfig, 'memory');
expect(config).toMatchInlineSnapshot(`
{
"api": {
"extraOrigins": "",
"version": "",
},
"connectionStatus": {
"minigraph": "PRE_INIT",
"upnpStatus": "",
},
"local": {
"sandbox": "no",
},
"remote": {
"accesstoken": "",
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000",
"apikey": "",
"avatar": "",
"dynamicRemoteAccessType": "DISABLED",
"email": "",
"idtoken": "",
"localApiKey": "",
"refreshtoken": "",
"regWizTime": "",
"ssoSubIds": "",
"upnpEnabled": "",
"username": "",
"wanaccess": "",
"wanport": "",
},
}
`);
{
"api": {
"extraOrigins": "",
"version": "",
},
"connectionStatus": {
"minigraph": "PRE_INIT",
},
"local": {},
"notifier": {
"apikey": "",
},
"remote": {
"accesstoken": "",
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000",
"apikey": "",
"avatar": "",
"dynamicRemoteAccessType": "DISABLED",
"email": "",
"idtoken": "",
"refreshtoken": "",
"regWizTime": "",
"username": "",
"wanaccess": "",
"wanport": "",
},
"upc": {
"apikey": "",
},
}
`);
});

test('it creates a FLASH config with OPTIONAL values', () => {
const basicConfig = cloneDeep(initialState);
// 2fa & t2fa should be ignored
basicConfig.remote['2Fa'] = 'yes';
basicConfig.local['2Fa'] = 'yes';
basicConfig.local.showT2Fa = 'yes';

basicConfig.api.extraOrigins = 'myextra.origins';
basicConfig.remote.upnpEnabled = 'yes';
basicConfig.connectionStatus.upnpStatus = 'Turned On';
const config = getWriteableConfig(basicConfig, 'flash');
expect(config).toMatchInlineSnapshot(`
{
"api": {
"extraOrigins": "myextra.origins",
"version": "",
},
"local": {
"sandbox": "no",
},
"remote": {
"accesstoken": "",
"apikey": "",
"avatar": "",
"dynamicRemoteAccessType": "DISABLED",
"email": "",
"idtoken": "",
"localApiKey": "",
"refreshtoken": "",
"regWizTime": "",
"ssoSubIds": "",
"upnpEnabled": "yes",
"username": "",
"wanaccess": "",
"wanport": "",
},
}
`);
{
"api": {
"extraOrigins": "myextra.origins",
"version": "",
},
"local": {
"2Fa": "yes",
"showT2Fa": "yes",
},
"notifier": {
"apikey": "",
},
"remote": {
"2Fa": "yes",
"accesstoken": "",
"apikey": "",
"avatar": "",
"dynamicRemoteAccessType": "DISABLED",
"email": "",
"idtoken": "",
"refreshtoken": "",
"regWizTime": "",
"upnpEnabled": "yes",
"username": "",
"wanaccess": "",
"wanport": "",
},
"upc": {
"apikey": "",
},
}
`);
});

test('it creates a MEMORY config with OPTIONAL values', () => {
const basicConfig = cloneDeep(initialState);
// 2fa & t2fa should be ignored
basicConfig.remote['2Fa'] = 'yes';
basicConfig.local['2Fa'] = 'yes';
basicConfig.local.showT2Fa = 'yes';
@@ -126,35 +129,41 @@ test('it creates a MEMORY config with OPTIONAL values', () => {
basicConfig.connectionStatus.upnpStatus = 'Turned On';
const config = getWriteableConfig(basicConfig, 'memory');
expect(config).toMatchInlineSnapshot(`
{
"api": {
"extraOrigins": "myextra.origins",
"version": "",
},
"connectionStatus": {
"minigraph": "PRE_INIT",
"upnpStatus": "Turned On",
},
"local": {
"sandbox": "no",
},
"remote": {
"accesstoken": "",
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000",
"apikey": "",
"avatar": "",
"dynamicRemoteAccessType": "DISABLED",
"email": "",
"idtoken": "",
"localApiKey": "",
"refreshtoken": "",
"regWizTime": "",
"ssoSubIds": "",
"upnpEnabled": "yes",
"username": "",
"wanaccess": "",
"wanport": "",
},
}
`);
{
"api": {
"extraOrigins": "myextra.origins",
"version": "",
},
"connectionStatus": {
"minigraph": "PRE_INIT",
"upnpStatus": "Turned On",
},
"local": {
"2Fa": "yes",
"showT2Fa": "yes",
},
"notifier": {
"apikey": "",
},
"remote": {
"2Fa": "yes",
"accesstoken": "",
"allowedOrigins": "/var/run/unraid-notifications.sock, /var/run/unraid-php.sock, /var/run/unraid-cli.sock, https://connect.myunraid.net, https://connect-staging.myunraid.net, https://dev-my.myunraid.net:4000",
"apikey": "",
"avatar": "",
"dynamicRemoteAccessType": "DISABLED",
"email": "",
"idtoken": "",
"refreshtoken": "",
"regWizTime": "",
"upnpEnabled": "yes",
"username": "",
"wanaccess": "",
"wanport": "",
},
"upc": {
"apikey": "",
},
}
`);
});

@@ -1,8 +1,7 @@
import { test, expect } from 'vitest';
import { parse } from 'ini';
import { Serializer } from 'multi-ini';
import { expect, test } from 'vitest';

import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer';
import { Serializer } from 'multi-ini';

test('MultiIni breaks when serializing an object with a boolean inside', async () => {
const objectToSerialize = {

@@ -1,23 +1,23 @@
import { expect, test } from 'vitest';
import { getBannerPathIfPresent, getCasePathIfPresent } from "@app/core/utils/images/image-file-helpers";
import { loadDynamixConfigFile } from "@app/store/actions/load-dynamix-config-file";
import { store } from "@app/store/index";

import { getBannerPathIfPresent, getCasePathIfPresent } from '@app/core/utils/images/image-file-helpers';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file';
import { store } from '@app/store/index';
import { expect, test } from "vitest";

test('get case path returns expected result', async () => {
await expect(getCasePathIfPresent()).resolves.toContain('/dev/dynamix/case-model.png');
});
test('get case path returns expected result', () => {
expect(getCasePathIfPresent()).resolves.toContain('/dev/dynamix/case-model.png')
})

test('get banner path returns null (state unloaded)', async () => {
await expect(getBannerPathIfPresent()).resolves.toMatchInlineSnapshot('null');
});
test('get banner path returns null (state unloaded)', () => {
expect(getBannerPathIfPresent()).resolves.toMatchInlineSnapshot('null')
})

test('get banner path returns the banner (state loaded)', async () => {
await store.dispatch(loadDynamixConfigFile()).unwrap();
await expect(getBannerPathIfPresent()).resolves.toContain('/dev/dynamix/banner.png');
});
test('get banner path returns the banner (state loaded)', async() => {
await store.dispatch(loadDynamixConfigFile()).unwrap();
expect(getBannerPathIfPresent()).resolves.toContain('/dev/dynamix/banner.png');
})

test('get banner path returns null when no banner (state loaded)', async () => {
await store.dispatch(loadDynamixConfigFile()).unwrap();
await expect(getBannerPathIfPresent('notabanner.png')).resolves.toMatchInlineSnapshot('null');
});
expect(getBannerPathIfPresent('notabanner.png')).resolves.toMatchInlineSnapshot('null');
});
@@ -1,8 +0,0 @@
import { expect, test } from 'vitest';

import { cleanStdout } from '@app/core/utils/misc/clean-stdout';

test('Returns trimmed stdout from execa command', () => {
expect(cleanStdout({ stdout: 'test' })).toBe('test');
expect(cleanStdout({ stdout: 'test ' })).toBe('test');
});
@@ -1,64 +0,0 @@
import { expect, test } from 'vitest';

import { store } from '@app/store';
import { FileLoadStatus, StateFileKey } from '@app/store/types';

import '@app/core/utils/misc/get-key-file';
import '@app/store/modules/emhttp';

test('Before loading key returns null', async () => {
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file');
const { status } = store.getState().registration;

expect(status).toBe(FileLoadStatus.UNLOADED);
await expect(getKeyFile()).resolves.toBe(null);
});

test('Requires emhttp to be loaded to find key file', async () => {
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file');
const { loadRegistrationKey } = await import('@app/store/modules/registration');

// Load registration key into store
await store.dispatch(loadRegistrationKey());

// Check if store has state files loaded
const { status } = store.getState().registration;
expect(status).toBe(FileLoadStatus.LOADED);
await expect(getKeyFile()).resolves.toBe(null);
});

test('Returns empty key if key location is empty', async () => {
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file');
const { updateEmhttpState } = await import('@app/store/modules/emhttp');

// Set key file location as empty
// This should only happen if the user doesn't have a key file
store.dispatch(
updateEmhttpState({
field: StateFileKey.var,
state: {
regFile: '',
},
})
);

// Check if store has state files loaded
const { status } = store.getState().registration;
expect(status).toBe(FileLoadStatus.LOADED);
await expect(getKeyFile()).resolves.toBe('');
});

test('Returns decoded key file if key location exists', async () => {
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file');
const { loadStateFiles } = await import('@app/store/modules/emhttp');

// Load state files into store
await store.dispatch(loadStateFiles());

// Check if store has state files loaded
const { status } = store.getState().registration;
expect(status).toBe(FileLoadStatus.LOADED);
await expect(getKeyFile()).resolves.toMatchInlineSnapshot(
'"hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w"'
);
});
@@ -1,11 +1,9 @@
import { readFile, writeFile } from 'fs/promises';

import { parse } from 'ini';
import { Parser as MultiIniParser } from 'multi-ini';
import { expect, test } from 'vitest';

import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer';
import { test, expect } from 'vitest';
import { parseConfig } from '@app/core/utils/misc/parse-config';
import { Parser as MultiIniParser } from 'multi-ini';
import { readFile, writeFile } from 'fs/promises';
import { parse } from 'ini';
import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer';

const iniTestData = `["root"]
idx="0"
@@ -24,11 +22,11 @@ desc=""
passwd="no"`;

test('it loads a config from a passed in ini file successfully', () => {
const res = parseConfig<any>({
file: iniTestData,
type: 'ini',
});
expect(res).toMatchInlineSnapshot(`
const res = parseConfig<any>({
file: iniTestData,
type: 'ini',
});
expect(res).toMatchInlineSnapshot(`
{
"root": {
"desc": "Console and webGui login account",
@@ -50,26 +48,26 @@ test('it loads a config from a passed in ini file successfully', () => {
},
}
`);
expect(res?.root.desc).toEqual('Console and webGui login account');
expect(res?.root.desc).toEqual('Console and webGui login account');
});

test('it loads a config from disk properly', () => {
const path = './dev/states/var.ini';
const res = parseConfig<any>({ filePath: path, type: 'ini' });
expect(res.DOMAIN_SHORT).toEqual(undefined);
expect(res.domainShort).toEqual('');
expect(res.shareCount).toEqual('0');
const path = './dev/states/var.ini';
const res = parseConfig<any>({ filePath: path, type: 'ini' });
expect(res.DOMAIN_SHORT).toEqual(undefined);
expect(res.domainShort).toEqual('');
expect(res.shareCount).toEqual('0');
});

test('Confirm Multi-Ini Parser Still Broken', () => {
const parser = new MultiIniParser();
const res = parser.parse(iniTestData);
expect(res).toMatchInlineSnapshot('{}');
const parser = new MultiIniParser();
const res = parser.parse(iniTestData);
expect(res).toMatchInlineSnapshot('{}');
});

test('Combine Ini and Multi-Ini to read and then write a file with quotes', async () => {
const parsedFile = parse(iniTestData);
expect(parsedFile).toMatchInlineSnapshot(`
const parsedFile = parse(iniTestData);
expect(parsedFile).toMatchInlineSnapshot(`
{
"root": {
"desc": "Console and webGui login account",
@@ -92,10 +90,10 @@ test('Combine Ini and Multi-Ini to read and then write a file with quotes', asyn
}
`);

const ini = safelySerializeObjectToIni(parsedFile);
await writeFile('/tmp/test.ini', ini);
const file = await readFile('/tmp/test.ini', 'utf-8');
expect(file).toMatchInlineSnapshot(`
const ini = safelySerializeObjectToIni(parsedFile);
await writeFile('/tmp/test.ini', ini);
const file = await readFile('/tmp/test.ini', 'utf-8');
expect(file).toMatchInlineSnapshot(`
"[root]
idx="0"
name="root"

@@ -1,13 +1,12 @@
import { expect, test } from 'vitest';

import { getShares } from '@app/core/utils/shares/get-shares';
import { store } from '@app/store';
import { loadStateFiles } from '@app/store/modules/emhttp';

test('Returns both disk and user shares', async () => {
await store.dispatch(loadStateFiles());
await store.dispatch(loadStateFiles());

expect(getShares()).toMatchInlineSnapshot(`
expect(getShares()).toMatchInlineSnapshot(`
{
"disks": [],
"users": [
@@ -97,8 +96,8 @@ test('Returns both disk and user shares', async () => {
});

test('Returns shares by type', async () => {
await store.dispatch(loadStateFiles());
expect(getShares('user')).toMatchInlineSnapshot(`
await store.dispatch(loadStateFiles());
expect(getShares('user')).toMatchInlineSnapshot(`
{
"allocator": "highwater",
"cachePool": "cache",
@@ -120,7 +119,7 @@ test('Returns shares by type', async () => {
"used": 33619300,
}
`);
expect(getShares('users')).toMatchInlineSnapshot(`
expect(getShares('users')).toMatchInlineSnapshot(`
[
{
"allocator": "highwater",
@@ -204,12 +203,12 @@ test('Returns shares by type', async () => {
},
]
`);
expect(getShares('disk')).toMatchInlineSnapshot('null');
expect(getShares('disks')).toMatchInlineSnapshot('[]');
expect(getShares('disk')).toMatchInlineSnapshot('null');
expect(getShares('disks')).toMatchInlineSnapshot('[]');
});

test('Returns shares by name', async () => {
expect(getShares('user', { name: 'domains' })).toMatchInlineSnapshot(`
expect(getShares('user', { name: 'domains' })).toMatchInlineSnapshot(`
{
"allocator": "highwater",
"cachePool": "cache",
@@ -231,8 +230,8 @@ test('Returns shares by name', () => {
"used": 33619300,
}
`);
expect(getShares('user', { name: 'non-existent-user-share' })).toMatchInlineSnapshot('null');
// @TODO: disk shares need to be added to the dev ini files
expect(getShares('disk', { name: 'disk1' })).toMatchInlineSnapshot('null');
expect(getShares('disk', { name: 'non-existent-disk-share' })).toMatchInlineSnapshot('null');
expect(getShares('user', { name: 'non-existent-user-share' })).toMatchInlineSnapshot('null');
// @TODO: disk shares need to be added to the dev ini files
expect(getShares('disk', { name: 'disk1' })).toMatchInlineSnapshot('null');
expect(getShares('disk', { name: 'non-existent-disk-share' })).toMatchInlineSnapshot('null');
});

@@ -1,34 +0,0 @@
import { afterEach, expect, test, vi } from 'vitest';

import { checkDNS } from '@app/graphql/resolvers/query/cloud/check-dns';
import { store } from '@app/store';
import { clearKey } from '@app/store/modules/cache';
import { CacheKeys } from '@app/store/types';

afterEach(() => {
store.dispatch(clearKey(CacheKeys.checkDns));
});

test('it resolves dns successfully', async () => {
// @TODO
const dns = await checkDNS('example.com');
expect(dns.cloudIp).not.toBeNull();
}, 25_000);

test('testing twice results in a cache hit', async () => {
// Hit mothership
const getters = await import('@app/store/getters');
const dnsSpy = vi.spyOn(getters, 'getDnsCache');
const dns = await checkDNS();
expect(dns.cloudIp).toBeTypeOf('string');
expect(dnsSpy.mock.results[0]).toMatchInlineSnapshot(`
{
"type": "return",
"value": undefined,
}
`);
const dnslookup2 = await checkDNS();
expect(dnslookup2.cloudIp).toEqual(dns.cloudIp);
expect(dnsSpy.mock.results[1].value.cloudIp).toEqual(dns.cloudIp);
expect(store.getState().cache.nodeCache.getTtl(CacheKeys.checkDns)).toBeGreaterThan(500);
});
@@ -1,26 +1,10 @@
import 'reflect-metadata';

import { expect, test } from 'vitest';

import packageJson from '@app/../package.json';
import { checkMothershipAuthentication } from '@app/graphql/resolvers/query/cloud/check-mothership-authentication';
import { checkMothershipAuthentication } from "@app/graphql/resolvers/query/cloud/check-mothership-authentication";
import { expect, test } from "vitest";
import packageJson from '@app/../package.json'

test('It fails to authenticate with mothership with no credentials', async () => {
try {
await expect(
checkMothershipAuthentication('BAD', 'BAD')
).rejects.toThrowErrorMatchingInlineSnapshot(
`[Error: Failed to connect to https://mothership.unraid.net/ws with a "426" HTTP error.]`
);
expect(packageJson.version).not.toBeNull();
await expect(
checkMothershipAuthentication(packageJson.version, 'BAD_API_KEY')
).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Invalid credentials]`);
} catch (error) {
if (error instanceof Error && error.message.includes('Timeout')) {
// Test succeeds on timeout
return;
}
throw error;
}
});
await expect(checkMothershipAuthentication('BAD', 'BAD')).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Failed to connect to https://mothership.unraid.net/ws with a "426" HTTP error.]`);
expect(packageJson.version).not.toBeNull();
await expect(checkMothershipAuthentication(packageJson.version, 'BAD_API_KEY')).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Invalid credentials]`);
}, 15_000)
@@ -1,197 +1,124 @@
import { expect, test } from 'vitest';

import type { NginxUrlFields } from '@app/graphql/resolvers/subscription/network';
import { type Nginx } from '@app/core/types/states/nginx';
import {
getServerIps,
getUrlForField,
getUrlForServer,
} from '@app/graphql/resolvers/subscription/network';
import { type Nginx } from '../../../../core/types/states/nginx';
import { getUrlForField, getUrlForServer, getServerIps, type NginxUrlFields } from '@app/graphql/resolvers/subscription/network';
import { store } from '@app/store';
import { loadConfigFile } from '@app/store/modules/config';
import { loadStateFiles } from '@app/store/modules/emhttp';
import { loadConfigFile } from '@app/store/modules/config';

test.each([
[{ httpPort: 80, httpsPort: 443, url: 'my-default-url.com' }],
[{ httpPort: 123, httpsPort: 443, url: 'my-default-url.com' }],
[{ httpPort: 80, httpsPort: 12_345, url: 'my-default-url.com' }],
[{ httpPort: 212, httpsPort: 3_233, url: 'my-default-url.com' }],
[{ httpPort: 80, httpsPort: 443, url: 'https://BROKEN_URL' }],
[{ httpPort: 80, httpsPort: 443, url: 'my-default-url.com' }],
[{ httpPort: 123, httpsPort: 443, url: 'my-default-url.com' }],
[{ httpPort: 80, httpsPort: 12_345, url: 'my-default-url.com' }],
[{ httpPort: 212, httpsPort: 3_233, url: 'my-default-url.com' }],
[{ httpPort: 80, httpsPort: 443, url: 'https://BROKEN_URL' }],

])('getUrlForField', ({ httpPort, httpsPort, url }) => {
const responseInsecure = getUrlForField({
port: httpPort,
url,
});
const responseInsecure = getUrlForField({
port: httpPort,
url,
});

const responseSecure = getUrlForField({
portSsl: httpsPort,
url,
});
if (httpPort === 80) {
expect(responseInsecure.port).toBe('');
} else {
expect(responseInsecure.port).toBe(httpPort.toString());
}
const responseSecure = getUrlForField({
portSsl: httpsPort,
url,
});
if (httpPort === 80) {
expect(responseInsecure.port).toBe('');
} else {
expect(responseInsecure.port).toBe(httpPort.toString());
}

if (httpsPort === 443) {
expect(responseSecure.port).toBe('');
} else {
expect(responseSecure.port).toBe(httpsPort.toString());
}
if (httpsPort === 443) {
expect(responseSecure.port).toBe('');
} else {
expect(responseSecure.port).toBe(httpsPort.toString());
}
});

test('getUrlForServer - field exists, ssl disabled', () => {
const result = getUrlForServer({
nginx: {
lanIp: '192.168.1.1',
sslEnabled: false,
httpPort: 123,
httpsPort: 445,
} as const as Nginx,
field: 'lanIp',
});
expect(result).toMatchInlineSnapshot('"http://192.168.1.1:123/"');
const result = getUrlForServer({ nginx: { lanIp: '192.168.1.1', sslEnabled: false, httpPort: 123, httpsPort: 445 } as const as Nginx,
field: 'lanIp',
});
expect(result).toMatchInlineSnapshot('"http://192.168.1.1:123/"');
});

test('getUrlForServer - field exists, ssl yes', () => {
const result = getUrlForServer({
nginx: {
lanIp: '192.168.1.1',
sslEnabled: true,
sslMode: 'yes',
httpPort: 123,
httpsPort: 445,
} as const as Nginx,
field: 'lanIp',
});
expect(result).toMatchInlineSnapshot('"https://192.168.1.1:445/"');
const result = getUrlForServer({
nginx: { lanIp: '192.168.1.1', sslEnabled: true, sslMode: 'yes', httpPort: 123, httpsPort: 445 } as const as Nginx,
field: 'lanIp',
});
expect(result).toMatchInlineSnapshot('"https://192.168.1.1:445/"');
});

test('getUrlForServer - field exists, ssl yes, port empty', () => {
const result = getUrlForServer({
nginx: {
lanIp: '192.168.1.1',
sslEnabled: true,
sslMode: 'yes',
httpPort: 80,
httpsPort: 443,
} as const as Nginx,
field: 'lanIp',
});
expect(result).toMatchInlineSnapshot('"https://192.168.1.1/"');
const result = getUrlForServer(
{ nginx: { lanIp: '192.168.1.1', sslEnabled: true, sslMode: 'yes', httpPort: 80, httpsPort: 443 } as const as Nginx,
field: 'lanIp',
});
expect(result).toMatchInlineSnapshot('"https://192.168.1.1/"');
});

test('getUrlForServer - field exists, ssl auto', async () => {
const getResult = async () =>
getUrlForServer({
nginx: {
lanIp: '192.168.1.1',
sslEnabled: true,
sslMode: 'auto',
httpPort: 123,
httpsPort: 445,
} as const as Nginx,
field: 'lanIp',
});
await expect(getResult).rejects.toThrowErrorMatchingInlineSnapshot(
`[Error: Cannot get IP Based URL for field: "lanIp" SSL mode auto]`
);
test('getUrlForServer - field exists, ssl auto', () => {
const getResult = async () => getUrlForServer({
nginx: { lanIp: '192.168.1.1', sslEnabled: true, sslMode: 'auto', httpPort: 123, httpsPort: 445 } as const as Nginx,
field: 'lanIp',
});
void expect(getResult).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Cannot get IP Based URL for field: "lanIp" SSL mode auto]`);
});

test('getUrlForServer - field does not exist, ssl disabled', async () => {
const getResult = async () =>
getUrlForServer({
nginx: { lanIp: '192.168.1.1', sslEnabled: false, sslMode: 'no' } as const as Nginx,
ports: {
port: ':123',
portSsl: ':445',
defaultUrl: new URL('https://my-default-url.unraid.net'),
},
// @ts-expect-error Field doesn't exist
field: 'idontexist',
});
await expect(getResult).rejects.toThrowErrorMatchingInlineSnapshot(
`[Error: IP URL Resolver: Could not resolve any access URL for field: "idontexist", is FQDN?: false]`
);
test('getUrlForServer - field does not exist, ssl disabled', () => {
const getResult = async () => getUrlForServer(
{
nginx: { lanIp: '192.168.1.1', sslEnabled: false, sslMode: 'no' } as const as Nginx,
ports: {
port: ':123', portSsl: ':445', defaultUrl: new URL('https://my-default-url.unraid.net'),
},
// @ts-expect-error Field doesn't exist
field: 'idontexist',
});
void expect(getResult).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: IP URL Resolver: Could not resolve any access URL for field: "idontexist", is FQDN?: false]`);
});

test('getUrlForServer - FQDN - field exists, port non-empty', () => {
const result = getUrlForServer({
nginx: { lanFqdn: 'my-fqdn.unraid.net', httpsPort: 445 } as const as Nginx,
field: 'lanFqdn',
});
expect(result).toMatchInlineSnapshot('"https://my-fqdn.unraid.net:445/"');
const result = getUrlForServer({
nginx: { lanFqdn: 'my-fqdn.unraid.net', httpsPort: 445 } as const as Nginx,
field: 'lanFqdn',
});
expect(result).toMatchInlineSnapshot('"https://my-fqdn.unraid.net:445/"');
});

test('getUrlForServer - FQDN - field exists, port empty', () => {
const result = getUrlForServer({
nginx: { lanFqdn: 'my-fqdn.unraid.net', httpPort: 80, httpsPort: 443 } as const as Nginx,
field: 'lanFqdn',
});
expect(result).toMatchInlineSnapshot('"https://my-fqdn.unraid.net/"');
const result = getUrlForServer({ nginx: { lanFqdn: 'my-fqdn.unraid.net', httpPort: 80, httpsPort: 443 } as const as Nginx,
field: 'lanFqdn',
});
expect(result).toMatchInlineSnapshot('"https://my-fqdn.unraid.net/"');
});

test.each([
[
{
nginx: {
lanFqdn: 'my-fqdn.unraid.net',
sslEnabled: false,
sslMode: 'no',
httpPort: 80,
httpsPort: 443,
} as const as Nginx,
field: 'lanFqdn' as NginxUrlFields,
},
],
[
{
nginx: {
wanFqdn: 'my-fqdn.unraid.net',
sslEnabled: true,
sslMode: 'yes',
httpPort: 80,
httpsPort: 443,
} as const as Nginx,
field: 'wanFqdn' as NginxUrlFields,
},
],
[
{
nginx: {
wanFqdn6: 'my-fqdn.unraid.net',
sslEnabled: true,
sslMode: 'auto',
httpPort: 80,
httpsPort: 443,
} as const as Nginx,
field: 'wanFqdn6' as NginxUrlFields,
},
],
[{ nginx: { lanFqdn: 'my-fqdn.unraid.net', sslEnabled: false, sslMode: 'no', httpPort: 80, httpsPort: 443 } as const as Nginx, field: 'lanFqdn' as NginxUrlFields }],
[{ nginx: { wanFqdn: 'my-fqdn.unraid.net', sslEnabled: true, sslMode: 'yes', httpPort: 80, httpsPort: 443 } as const as Nginx, field: 'wanFqdn' as NginxUrlFields }],
[{ nginx: { wanFqdn6: 'my-fqdn.unraid.net', sslEnabled: true, sslMode: 'auto', httpPort: 80, httpsPort: 443 } as const as Nginx, field: 'wanFqdn6' as NginxUrlFields }],

])('getUrlForServer - FQDN', ({ nginx, field }) => {
const result = getUrlForServer({ nginx, field });
expect(result.toString()).toBe('https://my-fqdn.unraid.net/');
const result = getUrlForServer({ nginx, field });
expect(result.toString()).toBe('https://my-fqdn.unraid.net/');
});

test('getUrlForServer - field does not exist, ssl disabled', async () => {
const getResult = async () =>
getUrlForServer({
nginx: { lanFqdn: 'my-fqdn.unraid.net' } as const as Nginx,
ports: { portSsl: '', port: '', defaultUrl: new URL('https://my-default-url.unraid.net') },
// @ts-expect-error Field doesn't exist
field: 'idontexist',
});
await expect(getResult).rejects.toThrowErrorMatchingInlineSnapshot(
`[Error: IP URL Resolver: Could not resolve any access URL for field: "idontexist", is FQDN?: false]`
);
test('getUrlForServer - field does not exist, ssl disabled', () => {
const getResult = async () => getUrlForServer({ nginx:
{ lanFqdn: 'my-fqdn.unraid.net' } as const as Nginx,
ports: { portSsl: '', port: '', defaultUrl: new URL('https://my-default-url.unraid.net') },
// @ts-expect-error Field doesn't exist
field: 'idontexist' });
void expect(getResult).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: IP URL Resolver: Could not resolve any access URL for field: "idontexist", is FQDN?: false]`);
});

test('integration test, loading nginx ini and generating all URLs', async () => {
await store.dispatch(loadStateFiles());
await store.dispatch(loadConfigFile());
await store.dispatch(loadStateFiles());
await store.dispatch(loadConfigFile());

const urls = getServerIps();
expect(urls.urls).toMatchInlineSnapshot(`
const urls = getServerIps();
expect(urls.urls).toMatchInlineSnapshot(`
[
{
"ipv4": "https://tower.local:4443/",
@@ -271,7 +198,7 @@ test('integration test, loading nginx ini and generating all URLs', async () =>
},
]
`);
expect(urls.errors).toMatchInlineSnapshot(`
expect(urls.errors).toMatchInlineSnapshot(`
[
[Error: IP URL Resolver: Could not resolve any access URL for field: "lanIp6", is FQDN?: false],
]

110
api/src/__test__/mothership/api-key/api-key-check-jobs.test.ts
Normal file
@@ -0,0 +1,110 @@
import { API_KEY_STATUS } from '@app/mothership/api-key/api-key-types';
import * as apiKeyCheckJobs from '@app/mothership/jobs/api-key-check-jobs';
import * as apiKeyValidator from '@app/mothership/api-key/validate-api-key-with-keyserver';
import { describe, expect, it, vi } from 'vitest';
import { type RecursivePartial } from '@app/types/index';
import { type RootState } from '@app/store/index';

describe('apiKeyCheckJob Tests', () => {
it('API Check Job (with success)', async () => {
const getState = vi.fn<[], RecursivePartial<RootState>>().mockReturnValue({
apiKey: { status: API_KEY_STATUS.PENDING_VALIDATION },
config: { remote: { apikey: '_______________________BIG_API_KEY_HERE_________________________' } },
emhttp: { var: { flashGuid: 'my-flash-guid', version: '6.11.5' } },
});

const dispatch = vi.fn();

const validationSpy = vi.spyOn(apiKeyValidator, 'validateApiKeyWithKeyServer').mockResolvedValue(API_KEY_STATUS.API_KEY_VALID);

await expect(apiKeyCheckJobs.apiKeyCheckJob(getState, dispatch)).resolves.toBe(true);

expect(validationSpy).toHaveBeenCalledOnce();

expect(dispatch).toHaveBeenLastCalledWith({
payload: API_KEY_STATUS.API_KEY_VALID,
type: 'apiKey/setApiKeyState',
});
});

it('API Check Job (with invalid length key)', async () => {
// Setup state
const getState = vi.fn<[], RecursivePartial<RootState>>().mockReturnValue({
apiKey: { status: API_KEY_STATUS.PENDING_VALIDATION },
config: { remote: { apikey: 'too-short-key' } },
emhttp: { var: { flashGuid: 'my-flash-guid', version: '6.11.5' } },
});

const dispatch = vi.fn();

const validationSpy = vi.spyOn(apiKeyValidator, 'validateApiKeyWithKeyServer').mockResolvedValue(API_KEY_STATUS.API_KEY_VALID);

await expect(apiKeyCheckJobs.apiKeyCheckJob(getState, dispatch)).resolves.toBe(false);
expect(dispatch).toHaveBeenCalledWith(expect.any(Function));

expect(validationSpy).not.toHaveBeenCalled();
});

it('API Check Job (with a failure that throws an error - NETWORK_ERROR)', async () => {
const getState = vi.fn<[], RecursivePartial<RootState>>().mockReturnValue({
apiKey: { status: API_KEY_STATUS.PENDING_VALIDATION },
config: { remote: { apikey: '_______________________BIG_API_KEY_HERE_________________________' } },
emhttp: { var: { flashGuid: 'my-flash-guid', version: '6.11.5' } },
});

const dispatch = vi.fn();

const validationSpy = vi.spyOn(apiKeyValidator, 'validateApiKeyWithKeyServer')
.mockResolvedValueOnce(API_KEY_STATUS.NETWORK_ERROR);

await expect(apiKeyCheckJobs.apiKeyCheckJob(getState, dispatch)).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Keyserver Failure, must retry]`);

expect(validationSpy).toHaveBeenCalledOnce();

expect(dispatch).toHaveBeenCalledWith({
payload: API_KEY_STATUS.NETWORK_ERROR,
type: 'apiKey/setApiKeyState',
});
});

it('API Check Job (with a failure that throws an error - INVALID_RESPONSE)', async () => {
const getState = vi.fn<[], RecursivePartial<RootState>>().mockReturnValue({
apiKey: { status: API_KEY_STATUS.PENDING_VALIDATION },
config: { remote: { apikey: '_______________________BIG_API_KEY_HERE_________________________' } },
emhttp: { var: { flashGuid: 'my-flash-guid', version: '6.11.5' } },
});

const dispatch = vi.fn();

const validationSpy = vi.spyOn(apiKeyValidator, 'validateApiKeyWithKeyServer')
.mockResolvedValueOnce(API_KEY_STATUS.INVALID_KEYSERVER_RESPONSE);

await expect(apiKeyCheckJobs.apiKeyCheckJob(getState, dispatch)).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Keyserver Failure, must retry]`);

expect(validationSpy).toHaveBeenCalledOnce();

expect(dispatch).toHaveBeenCalledWith({
payload: API_KEY_STATUS.INVALID_KEYSERVER_RESPONSE,
type: 'apiKey/setApiKeyState',
});
}, 10_000);

it('API Check Job (with failure that results in a log out)', async () => {
const getState = vi.fn<[], RecursivePartial<RootState>>().mockReturnValue({
apiKey: { status: API_KEY_STATUS.PENDING_VALIDATION },
config: { remote: { apikey: '_______________________BIG_API_KEY_HERE_________________________' } },
emhttp: { var: { flashGuid: 'my-flash-guid', version: '6.11.5' } },
});

const dispatch = vi.fn();

const validationSpy = vi.spyOn(apiKeyValidator, 'validateApiKeyWithKeyServer')
.mockResolvedValue(API_KEY_STATUS.API_KEY_INVALID);

await expect(apiKeyCheckJobs.apiKeyCheckJob(getState, dispatch)).resolves.toBe(false);

expect(validationSpy).toHaveBeenCalledOnce();
expect(dispatch).toHaveBeenCalledTimes(1);
expect(dispatch).toHaveBeenCalledWith(expect.any(Function));
}, 10_000);
});
@@ -4,45 +4,45 @@ import { beforeEach, expect, test, vi } from 'vitest';
import '@app/mothership/utils/convert-to-fuzzy-time';

vi.mock('fs', () => ({
    default: {
        readFileSync: vi.fn().mockReturnValue('my-file'),
        writeFileSync: vi.fn(),
        existsSync: vi.fn(),
    },
    readFileSync: vi.fn().mockReturnValue('my-file'),
    existsSync: vi.fn(),
}));

vi.mock('@graphql-tools/schema', () => ({
    makeExecutableSchema: vi.fn(),
}));

vi.mock('@app/core/log', () => ({
    default: { relayLogger: { trace: vi.fn() } },
    relayLogger: { trace: vi.fn() },
    logger: { trace: vi.fn() },
}));

beforeEach(() => {
    vi.resetModules();
    vi.clearAllMocks();
});

const generateTestCases = () => {
    const cases: Array<{ min: number; max: number }> = [];
    for (let i = 0; i < 15; i += 1) {
        const min = Math.round(Math.random() * 100);
        const max = min + Math.round(Math.random() * 20);
        cases.push({ min, max });
    }

    return cases;
};

test.each(generateTestCases())('Successfully converts to fuzzy time %o', async ({ min, max }) => {
    const { convertToFuzzyTime } = await import('@app/mothership/utils/convert-to-fuzzy-time');

    const res = convertToFuzzyTime(min, max);
    expect(res).toBeGreaterThanOrEqual(min);
    expect(res).toBeLessThanOrEqual(max);
});
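For reference, a one-line implementation consistent with these assertions (an assumed sketch; the real convert-to-fuzzy-time util may differ):

// Pick a pseudo-random integer in the inclusive range [min, max]
export const convertToFuzzyTime = (min: number, max: number): number =>
    Math.floor(Math.random() * (max - min + 1)) + min;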
@@ -1,7 +1,6 @@
import { config } from 'dotenv';

config({
    path: './.env.test',
    debug: false,
    encoding: 'utf-8',
});
@@ -1,6 +0,0 @@
import { vi } from 'vitest';

vi.mock('@app/core/utils/misc/send-form-to-keyserver', () => {
    const sendFormToKeyServer = vi.fn().mockResolvedValue({ body: JSON.stringify({ valid: true }) });
    return { sendFormToKeyServer };
});
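Any test importing the real module path receives the canned response instead. A hypothetical consumer-side call (the URL, argument shape, and call signature here are invented for illustration):

import { sendFormToKeyServer } from '@app/core/utils/misc/send-form-to-keyserver';

// Resolves to { body: '{"valid":true}' } thanks to the mock above
const response = await sendFormToKeyServer('https://keys.example/validate', { guid: 'my-flash-guid' });
console.log(JSON.parse(response.body).valid); // true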
@@ -1,36 +0,0 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html

exports[`Before init returns default values for all fields 1`] = `
{
  "api": {
    "extraOrigins": "",
    "version": "",
  },
  "connectionStatus": {
    "minigraph": "PRE_INIT",
    "upnpStatus": "",
  },
  "local": {
    "sandbox": "no",
  },
  "nodeEnv": "test",
  "remote": {
    "accesstoken": "",
    "allowedOrigins": "",
    "apikey": "",
    "avatar": "",
    "dynamicRemoteAccessType": "DISABLED",
    "email": "",
    "idtoken": "",
    "localApiKey": "",
    "refreshtoken": "",
    "regWizTime": "",
    "ssoSubIds": "",
    "upnpEnabled": "",
    "username": "",
    "wanaccess": "",
    "wanport": "",
  },
  "status": "UNLOADED",
}
`;
@@ -1,11 +1,48 @@
import { test, expect } from 'vitest';
import { store } from '@app/store';
import { MyServersConfigMemory } from '@app/types/my-servers-config';

test('Before init returns default values for all fields', async () => {
    const state = store.getState().config;
    expect(state).toMatchSnapshot();
    expect(state).toMatchInlineSnapshot(`
      {
        "api": {
          "extraOrigins": "",
          "version": "",
        },
        "connectionStatus": {
          "minigraph": "PRE_INIT",
          "upnpStatus": "",
        },
        "local": {
          "2Fa": "",
          "showT2Fa": "",
        },
        "nodeEnv": "test",
        "notifier": {
          "apikey": "",
        },
        "remote": {
          "2Fa": "",
          "accesstoken": "",
          "allowedOrigins": "",
          "apikey": "",
          "avatar": "",
          "dynamicRemoteAccessType": "DISABLED",
          "email": "",
          "idtoken": "",
          "refreshtoken": "",
          "regWizTime": "",
          "upnpEnabled": "",
          "username": "",
          "wanaccess": "",
          "wanport": "",
        },
        "status": "UNLOADED",
        "upc": {
          "apikey": "",
        },
      }
    `);
}, 10_000);

test('After init returns values from cfg file for all fields', async () => {
@@ -27,10 +64,15 @@ test('After init returns values from cfg file for all fields', async () => {
            upnpStatus: '',
        },
        local: {
            sandbox: expect.any(String),
            '2Fa': '',
            showT2Fa: '',
        },
        nodeEnv: 'test',
        notifier: {
            apikey: 'unnotify_30994bfaccf839c65bae75f7fa12dd5ee16e69389f754c3b98ed7d5',
        },
        remote: {
            '2Fa': '',
            accesstoken: '',
            allowedOrigins: '',
            apikey: '_______________________BIG_API_KEY_HERE_________________________',
@@ -38,22 +80,25 @@ test('After init returns values from cfg file for all fields', async () => {
            dynamicRemoteAccessType: 'DISABLED',
            email: 'test@example.com',
            idtoken: '',
            localApiKey: '_______________________LOCAL_API_KEY_HERE_________________________',
            refreshtoken: '',
            regWizTime: '1611175408732_0951-1653-3509-FBA155FA23C0',
            ssoSubIds: '',
            upnpEnabled: 'no',
            username: 'zspearmint',
            wanaccess: 'yes',
            wanport: '8443',
        },
        status: 'LOADED',
        upc: {
            apikey: 'unupc_fab6ff6ffe51040595c6d9ffb63a353ba16cc2ad7d93f813a2e80a5810',
        },
    })
);
});

test('updateUserConfig merges in changes to current state', async () => {
    const { loadConfigFile, updateUserConfig } = await import('@app/store/modules/config');

    // Load cfg into store
    await store.dispatch(loadConfigFile());
@@ -77,10 +122,15 @@ test('updateUserConfig merges in changes to current state', async () => {
            upnpStatus: '',
        },
        local: {
            sandbox: expect.any(String),
            '2Fa': '',
            showT2Fa: '',
        },
        nodeEnv: 'test',
        notifier: {
            apikey: 'unnotify_30994bfaccf839c65bae75f7fa12dd5ee16e69389f754c3b98ed7d5',
        },
        remote: {
            '2Fa': '',
            accesstoken: '',
            allowedOrigins: '',
            apikey: '_______________________BIG_API_KEY_HERE_________________________',
@@ -88,16 +138,17 @@ test('updateUserConfig merges in changes to current state', async () => {
            dynamicRemoteAccessType: 'DISABLED',
            email: 'test@example.com',
            idtoken: '',
            localApiKey: '_______________________LOCAL_API_KEY_HERE_________________________',
            refreshtoken: '',
            regWizTime: '1611175408732_0951-1653-3509-FBA155FA23C0',
            ssoSubIds: '',
            upnpEnabled: 'no',
            username: 'zspearmint',
            wanaccess: 'yes',
            wanport: '8443',
        },
        status: 'LOADED',
        } as MyServersConfigMemory)
        upc: {
            apikey: 'unupc_fab6ff6ffe51040595c6d9ffb63a353ba16cc2ad7d93f813a2e80a5810',
        },
    })
);
});
@@ -1,5 +1,4 @@
import { test, expect } from 'vitest';
import { store } from '@app/store';
import { FileLoadStatus } from '@app/store/types';

@@ -7,9 +6,9 @@ import { FileLoadStatus } from '@app/store/types';
import '@app/store/modules/emhttp';

test('Before init returns default values for all fields', async () => {
    const { status, ...state } = store.getState().emhttp;
    expect(status).toBe(FileLoadStatus.UNLOADED);
    expect(state).toMatchInlineSnapshot(`
      {
        "devices": [],
        "disks": [],
@@ -25,27 +24,16 @@ test('Before init returns default values for all fields', async () => {
});

test('After init returns values from cfg file for all fields', async () => {
    const { loadStateFiles } = await import('@app/store/modules/emhttp');

    // Load state files into store
    await store.dispatch(loadStateFiles());

    // Check if store has state files loaded
    const { devices, networks, nfsShares, nginx, shares, disks, smbShares, status, users, var: varState } = store.getState().emhttp;
    expect(status).toBe(FileLoadStatus.LOADED);
    expect(devices).toMatchInlineSnapshot('[]');
    expect(networks).toMatchInlineSnapshot(`
      [
        {
          "bonding": true,
@@ -111,7 +99,7 @@ test('After init returns values from cfg file for all fields', async () => {
        },
      ]
    `);
    expect(nginx).toMatchInlineSnapshot(`
      {
        "certificateName": "*.thisisfourtyrandomcharacters012345678900.myunraid.net",
        "certificatePath": "/boot/config/ssl/certs/certificate_bundle.pem",
@@ -196,7 +184,7 @@ test('After init returns values from cfg file for all fields', async () => {
        "wanIp": "",
      }
    `);
    expect(disks).toMatchInlineSnapshot(`
      [
        {
          "comment": null,
@@ -368,7 +356,7 @@ test('After init returns values from cfg file for all fields', async () => {
        },
      ]
    `);
    expect(shares).toMatchInlineSnapshot(`
      [
        {
          "allocator": "highwater",
@@ -444,7 +432,7 @@ test('After init returns values from cfg file for all fields', async () => {
        },
      ]
    `);
    expect(nfsShares).toMatchInlineSnapshot(`
      [
        {
          "enabled": false,
@@ -632,7 +620,7 @@ test('After init returns values from cfg file for all fields', async () => {
        },
      ]
    `);
    expect(smbShares).toMatchInlineSnapshot(`
      [
        {
          "caseSensitive": "auto",
@@ -923,7 +911,7 @@ test('After init returns values from cfg file for all fields', async () => {
        },
      ]
    `);
    expect(users).toMatchInlineSnapshot(`
      [
        {
          "description": "Console and webGui login account",
@@ -948,7 +936,7 @@ test('After init returns values from cfg file for all fields', async () => {
        },
      ]
    `);
    expect(varState).toMatchInlineSnapshot(`
      {
        "bindMgt": false,
        "cacheNumDevices": NaN,
14 api/src/__test__/store/modules/notifications.test.ts Normal file
@@ -0,0 +1,14 @@
import { setupNotificationWatch } from '@app/core/modules/notifications/setup-notification-watch';
import { sleep } from '@app/core/utils/misc/sleep';
import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file';
import { store } from '@app/store/index';
import { expect, test } from 'vitest';

test('loads notifications properly', async () => {
    await store.dispatch(loadDynamixConfigFile()).unwrap();
    const watch = await setupNotificationWatch();
    expect(watch).not.toBeNull();
    await sleep(400);
    expect(store.getState().notifications.notifications).toMatchSnapshot();
    await watch?.close();
});
@@ -1,10 +1,9 @@
import { expect, test } from 'vitest';

import { store } from '@app/store';

test('Returns paths', async () => {
    const { paths } = store.getState();
    expect(Object.keys(paths)).toMatchInlineSnapshot(`
      [
        "core",
        "unraid-api-base",
@@ -27,7 +26,6 @@ test('Returns paths', async () => {
        "log-base",
        "var-run",
        "auth-sessions",
        "auth-keys",
      ]
    `);
});
@@ -1,66 +0,0 @@
import { expect, test } from 'vitest';

import { store } from '@app/store';
import { loadRegistrationKey } from '@app/store/modules/registration';
import { FileLoadStatus, StateFileKey } from '@app/store/types';

// Preloading imports for faster tests

test('Before loading key returns null', async () => {
    const { status, keyFile } = store.getState().registration;
    expect(status).toBe(FileLoadStatus.UNLOADED);
    expect(keyFile).toBe(null);
});

test('Requires emhttp to be loaded to find key file', async () => {
    // Load registration key into store
    await store.dispatch(loadRegistrationKey());

    // Check if store has state files loaded
    const { status, keyFile } = store.getState().registration;

    expect(status).toBe(FileLoadStatus.LOADED);
    expect(keyFile).toBe(null);
});

test('Returns empty key if key location is empty', async () => {
    const { updateEmhttpState } = await import('@app/store/modules/emhttp');
    const { loadRegistrationKey } = await import('@app/store/modules/registration');

    // Set key file location as empty
    // This should only happen if the user doesn't have a key file
    store.dispatch(
        updateEmhttpState({
            field: StateFileKey.var,
            state: {
                regFile: '',
            },
        })
    );

    // Load registration key into store
    await store.dispatch(loadRegistrationKey());

    // Check if store has state files loaded
    const { status, keyFile } = store.getState().registration;
    expect(status).toBe(FileLoadStatus.LOADED);
    expect(keyFile).toBe('');
});

test('Returns decoded key file if key location exists', async () => {
    const { loadRegistrationKey } = await import('@app/store/modules/registration');
    const { loadStateFiles } = await import('@app/store/modules/emhttp');

    // Load state files into store
    await store.dispatch(loadStateFiles());

    // Load registration key into store
    await store.dispatch(loadRegistrationKey());

    // Check if store has state files loaded
    const { status, keyFile } = store.getState().registration;
    expect(status).toBe(FileLoadStatus.LOADED);
    expect(keyFile).toMatchInlineSnapshot(
        '"hVs1tLjvC9FiiQsIwIQ7G1KszAcexf0IneThhnmf22SB0dGs5WzRkqMiSMmt2DtR5HOXFUD32YyxuzGeUXmky3zKpSu6xhZNKVg5atGM1OfvkzHBMldI3SeBLuUFSgejLbpNUMdTrbk64JJdbzle4O8wiQgkIpAMIGxeYLwLBD4zHBcfyzq40QnxG--HcX6j25eE0xqa2zWj-j0b0rCAXahJV2a3ySCbPzr1MvfPRTVb0rr7KJ-25R592hYrz4H7Sc1B3p0lr6QUxHE6o7bcYrWKDRtIVoZ8SMPpd1_0gzYIcl5GsDFzFumTXUh8NEnl0Q8hwW1YE-tRc6Y_rrvd7w"'
    );
});
@@ -1,18 +0,0 @@
import { join } from 'path';

import { expect, test } from 'vitest';

import type { DevicesIni } from '@app/store/state-parsers/devices';
import { store } from '@app/store';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/devices');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'devs.ini');
    const stateFile = parseConfig<DevicesIni>({
        filePath,
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchInlineSnapshot('[]');
});
@@ -1,83 +0,0 @@
import { join } from 'path';

import { expect, test } from 'vitest';

import type { NetworkIni } from '@app/store/state-parsers/network';
import { store } from '@app/store';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/network');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'network.ini');
    const stateFile = parseConfig<NetworkIni>({
        filePath,
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchInlineSnapshot(`
      [
        {
          "bonding": true,
          "bondingMiimon": "100",
          "bondingMode": "1",
          "bondname": "",
          "bondnics": [
            "eth0",
            "eth1",
            "eth2",
            "eth3",
          ],
          "brfd": "0",
          "bridging": true,
          "brname": "",
          "brnics": "bond0",
          "brstp": "0",
          "description": [
            "",
          ],
          "dhcp6Keepresolv": false,
          "dhcpKeepresolv": false,
          "dnsServer1": "1.1.1.1",
          "dnsServer2": "8.8.8.8",
          "gateway": [
            "192.168.1.1",
          ],
          "gateway6": [
            "",
          ],
          "ipaddr": [
            "192.168.1.150",
          ],
          "ipaddr6": [
            "",
          ],
          "metric": [
            "",
          ],
          "metric6": [
            "",
          ],
          "mtu": "",
          "netmask": [
            "255.255.255.0",
          ],
          "netmask6": [
            "",
          ],
          "privacy6": [
            "",
          ],
          "protocol": [
            "",
          ],
          "type": "access",
          "useDhcp": [
            true,
          ],
          "useDhcp6": [
            false,
          ],
        },
      ]
    `);
});
@@ -1,205 +0,0 @@
import { join } from 'path';

import { expect, test } from 'vitest';

import type { NfsSharesIni } from '@app/store/state-parsers/nfs';
import { store } from '@app/store';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/nfs');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'sec_nfs.ini');
    const stateFile = parseConfig<NfsSharesIni>({
        filePath,
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchInlineSnapshot(`
      [
        {
          "enabled": false,
          "hostList": "",
          "name": "disk1",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk2",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk3",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk4",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk5",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk6",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk7",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk8",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk9",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk10",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk11",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk12",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk13",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk14",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk15",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk16",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk17",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk18",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk19",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk20",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk21",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "disk22",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
        {
          "enabled": false,
          "hostList": "",
          "name": "abc",
          "readList": [],
          "security": "public",
          "writeList": [],
        },
      ]
    `);
});
@@ -1,18 +1,16 @@
import { join } from 'path';

import { expect, test } from 'vitest';

import { store } from '@app/store';
import type { NginxIni } from '@app/store/state-parsers/nginx';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/nginx');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'nginx.ini');
    const stateFile = parseConfig<NginxIni>({
        filePath,
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchSnapshot();
});
@@ -1,20 +1,18 @@
import { join } from 'path';

import { expect, test } from 'vitest';

import { store } from '@app/store';
import type { SharesIni } from '@app/store/state-parsers/shares';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/shares');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'shares.ini');
    const stateFile = parseConfig<SharesIni>({
        filePath,
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchInlineSnapshot(`
      [
        {
          "allocator": "highwater",
@@ -1,20 +1,18 @@
import { join } from 'path';

import { expect, test } from 'vitest';

import { store } from '@app/store';
import type { SlotsIni } from '@app/store/state-parsers/slots';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/slots');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'disks.ini');
    const stateFile = parseConfig<SlotsIni>({
        filePath,
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchInlineSnapshot(`
      [
        {
          "comment": null,
@@ -1,308 +0,0 @@
import { join } from 'path';

import { expect, test } from 'vitest';

import type { SmbIni } from '@app/store/state-parsers/smb';
import { store } from '@app/store';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/smb');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'sec.ini');
    const stateFile = parseConfig<SmbIni>({
        filePath,
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchInlineSnapshot(`
      [
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk1",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk2",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk3",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk4",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk5",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk6",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk7",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk8",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk9",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk10",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk11",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk12",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk13",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk14",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk15",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk16",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk17",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk18",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk19",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk20",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk21",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "disk22",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "caseSensitive": "auto",
          "enabled": true,
          "fruit": "no",
          "name": "abc",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
        {
          "enabled": true,
          "fruit": "no",
          "name": "flash",
          "readList": [],
          "security": "public",
          "timemachine": {
            "volsizelimit": NaN,
          },
          "writeList": [],
        },
      ]
    `);
});
@@ -1,42 +0,0 @@
import { join } from 'path';

import { expect, test } from 'vitest';

import type { UsersIni } from '@app/store/state-parsers/users';
import { store } from '@app/store';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/users');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'users.ini');
    const stateFile = parseConfig<UsersIni>({
        filePath,
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchInlineSnapshot(`
      [
        {
          "description": "Console and webGui login account",
          "id": "0",
          "name": "root",
          "password": true,
          "role": "admin",
        },
        {
          "description": "",
          "id": "1",
          "name": "xo",
          "password": true,
          "role": "user",
        },
        {
          "description": "",
          "id": "2",
          "name": "test_user",
          "password": false,
          "role": "user",
        },
      ]
    `);
});
@@ -1,21 +1,19 @@
import { join } from 'path';

import { expect, test } from 'vitest';

import { store } from '@app/store';
import type { VarIni } from '@app/store/state-parsers/var';

test('Returns parsed state file', async () => {
    const { parse } = await import('@app/store/state-parsers/var');
    const { parseConfig } = await import('@app/core/utils/misc/parse-config');
    const { paths } = store.getState();
    const filePath = join(paths.states, 'var.ini');
    const stateFile = parseConfig<VarIni>({
        filePath,
        type: 'ini',
    });

    expect(parse(stateFile)).toMatchInlineSnapshot(`
      {
        "bindMgt": false,
        "cacheNumDevices": NaN,
@@ -1,30 +0,0 @@
import { expect, test, vi } from 'vitest';

vi.mock('@app/core/pubsub', () => ({
    pubsub: { publish: vi.fn() },
}));

test('Creates a registration event', async () => {
    const { createRegistrationEvent } = await import('@app/store/sync/registration-sync');
    const { store } = await import('@app/store');
    const { loadStateFiles } = await import('@app/store/modules/emhttp');

    // Load state files into store
    await store.dispatch(loadStateFiles());

    const state = store.getState();
    const registrationEvent = createRegistrationEvent(state);
    expect(registrationEvent).toMatchInlineSnapshot(`
      {
        "registration": {
          "guid": "13FE-4200-C300-58C372A52B19",
          "keyFile": {
            "contents": null,
            "location": "/app/dev/Unraid.net/Pro.key",
          },
          "state": "PRO",
          "type": "PRO",
        },
      }
    `);
});
@@ -1,20 +0,0 @@
import { type Mapping } from '@runonflux/nat-upnp';
import { expect, test, vi } from 'vitest';

import { getWanPortForUpnp } from '@app/upnp/helpers';

test('it successfully gets a wan port given no exclusions', () => {
    const port = getWanPortForUpnp(null, 36_000, 38_000);
    expect(port).toBeGreaterThan(35_999);
    expect(port).toBeLessThan(38_001);
});

test('it fails to get a wan port given exclusions', () => {
    const port = getWanPortForUpnp([{ public: { port: 36_000 } }] as Mapping[], 36_000, 36_000);
    expect(port).toBeNull();
});

test('it succeeds in getting a wan port given exclusions', () => {
    const port = getWanPortForUpnp([{ public: { port: 36_000 } }] as Mapping[], 30_000, 36_000);
    expect(port).not.toBeNull();
});
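A sketch consistent with these three tests (an assumption, not the actual helper): choose a random port in [minPort, maxPort] that no existing UPnP mapping claims, or return null when every candidate is taken.

import { type Mapping } from '@runonflux/nat-upnp';

export const getWanPortForUpnp = (
    mappings: Mapping[] | null,
    minPort: number,
    maxPort: number
): number | null => {
    // Ports already claimed by existing mappings are excluded
    const used = new Set((mappings ?? []).map((mapping) => mapping.public.port));
    const candidates: number[] = [];
    for (let port = minPort; port <= maxPort; port += 1) {
        if (!used.has(port)) candidates.push(port);
    }
    if (candidates.length === 0) return null;
    return candidates[Math.floor(Math.random() * candidates.length)];
};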
@@ -1,71 +0,0 @@
import { describe, expect, it } from 'vitest';

import { formatDatetime } from '@app/utils';

describe('formatDatetime', () => {
    const testDate = new Date('2024-02-14T12:34:56');

    it('formats with default system time format and omits timezone', () => {
        const result = formatDatetime(testDate);
        // Default format is %c with timezone omitted
        expect(result).toMatch('Wed 14 Feb 2024 12:34:56 PM');
    });

    it('includes timezone when omitTimezone is false', () => {
        const result = formatDatetime(testDate, { omitTimezone: false });
        // Should include timezone at the end
        expect(result).toMatch(/^Wed 14 Feb 2024 12:34:56 PM .+$/);
    });

    it('formats with custom date and time formats', () => {
        const result = formatDatetime(testDate, {
            dateFormat: '%Y-%m-%d',
            timeFormat: '%H:%M',
        });
        expect(result).toBe('2024-02-14 12:34');
    });

    it('formats with custom date format and default time format', () => {
        const result = formatDatetime(testDate, {
            dateFormat: '%d/%m/%Y',
        });
        expect(result).toBe('14/02/2024 12:34 PM');
    });

    describe('Unraid-style date formats', () => {
        const dateFormats = [
            '%A, %Y %B %e', // Day, YYYY Month D
            '%A, %e %B %Y', // Day, D Month YYYY
            '%A, %B %e, %Y', // Day, Month D, YYYY
            '%A, %m/%d/%Y', // Day, MM/DD/YYYY
            '%A, %d-%m-%Y', // Day, DD-MM-YYYY
            '%A, %d.%m.%Y', // Day, DD.MM.YYYY
            '%A, %Y-%m-%d', // Day, YYYY-MM-DD
        ];

        const timeFormats = [
            '%I:%M %p', // 12 hours
            '%R', // 24 hours
        ];

        it.each(dateFormats)('formats date with %s', (dateFormat) => {
            const result = formatDatetime(testDate, { dateFormat });
            expect(result).toMatch(/^Wednesday.*2024.*12:34 PM$/);
        });

        it.each(timeFormats)('formats time with %s', (timeFormat) => {
            // specify a non-system-time date format for this test
            const result = formatDatetime(testDate, { timeFormat, dateFormat: dateFormats[1] });
            const expectedTime = timeFormat === '%R' ? '12:34' : '12:34 PM';
            expect(result).toContain(expectedTime);
        });

        it.each(dateFormats.flatMap((d) => timeFormats.map((t) => [d, t])))(
            'formats with date format %s and time format %s',
            (dateFormat, timeFormat) => {
                const result = formatDatetime(testDate, { dateFormat, timeFormat });
                expect(result).toMatch(/^Wednesday.*2024.*(?:12:34 PM|12:34)$/);
            }
        );
    });
});
@@ -1,39 +1,13 @@
#!/usr/bin/env node

import '@app/dotenv';

import { execa } from 'execa';
import { CommandFactory } from 'nest-commander';

import { internalLogger, logger } from '@app/core/log';
import { LOG_LEVEL } from '@app/environment';
import { CliModule } from '@app/unraid-api/cli/cli.module';
import { LogService } from '@app/unraid-api/cli/log.service';

const getUnraidApiLocation = async () => {
    try {
        const shellToUse = await execa('which unraid-api');
        return shellToUse.stdout.trim();
    } catch (err) {
        logger.debug('Could not find unraid-api in PATH, using default location');

        return '/usr/bin/unraid-api';
    }
};

import { main } from '@app/cli/index';
import { internalLogger } from '@app/core/log';

try {
    await CommandFactory.run(CliModule, {
        cliName: 'unraid-api',
        logger: LOG_LEVEL === 'TRACE' ? new LogService() : false, // - enable this to see nest initialization issues
        completion: {
            fig: false,
            cmd: 'completion-script',
            nativeShell: { executablePath: await getUnraidApiLocation() },
        },
    });
    process.exit(0);
    await main();
} catch (error) {
    logger.error('ERROR:', error);
    console.log(error);
    internalLogger.error({
        message: 'Failed to start unraid-api',
        error,
366 api/src/cli/commands/report.ts Normal file
@@ -0,0 +1,366 @@
import ipRegex from 'ip-regex';
import readLine from 'readline';
import { setEnv } from '@app/cli/set-env';
import { isUnraidApiRunning } from '@app/core/utils/pm2/unraid-api-running';
import { cliLogger } from '@app/core/log';
import { getters, store } from '@app/store';
import { stdout } from 'process';
import { loadConfigFile } from '@app/store/modules/config';
import { getApiApolloClient } from '../../graphql/client/api/get-api-client';
import {
    getCloudDocument,
    getServersDocument,
    type getServersQuery,
    type getCloudQuery,
} from '../../graphql/generated/api/operations';
import { MinigraphStatus } from '@app/graphql/generated/api/types';
import { API_VERSION } from '@app/environment';
import { loadStateFiles } from '@app/store/modules/emhttp';
import { ApolloClient, ApolloQueryResult, NormalizedCacheObject } from '@apollo/client/core/index.js';

type CloudQueryResult = NonNullable<ApolloQueryResult<getCloudQuery>['data']['cloud']>;
type ServersQueryResultServer = NonNullable<ApolloQueryResult<getServersQuery>['data']['servers']>[0];

type Verbosity = '' | '-v' | '-vv';

type ServersPayload = {
    online: ServersQueryResultServer[];
    offline: ServersQueryResultServer[];
    invalid: ServersQueryResultServer[];
};

type ReportObject = {
    os: {
        serverName: string;
        version: string;
    };
    api: {
        version: string;
        status: 'running' | 'stopped';
        environment: string;
        nodeVersion: string;
    };
    apiKey: 'valid' | 'invalid' | string;
    servers?: ServersPayload | null;
    myServers: {
        status: 'authenticated' | 'signed out';
        myServersUsername?: string;
    };
    minigraph: {
        status: MinigraphStatus;
        timeout: number | null;
        error: string | null;
    };
    cloud: {
        status: string;
        error?: string;
        ip?: string;
        allowedOrigins?: string[] | null;
    };
};

// This should return the status of the apiKey and mothership
export const getCloudData = async (
    client: ApolloClient<NormalizedCacheObject>
): Promise<CloudQueryResult | null> => {
    try {
        const cloud = await client.query({ query: getCloudDocument });
        return cloud.data.cloud ?? null;
    } catch (error: unknown) {
        cliLogger.trace(
            'Failed fetching cloud from local graphql with "%s"',
            error instanceof Error ? error.message : 'Unknown Error'
        );

        return null;
    }
};

export const getServersData = async ({
    client,
    v,
}: {
    client: ApolloClient<NormalizedCacheObject>;
    v: Verbosity;
}): Promise<ServersPayload | null> => {
    if (v === '') {
        return null;
    }

    try {
        const servers = await client.query({ query: getServersDocument });
        const foundServers = servers.data.servers.reduce<ServersPayload>(
            (acc, curr) => {
                switch (curr.status) {
                    case 'online':
                        acc.online.push(curr);
                        break;
                    case 'offline':
                        acc.offline.push(curr);
                        break;
                    default:
                        acc.invalid.push(curr);
                        break;
                }

                return acc;
            },
            { online: [], offline: [], invalid: [] }
        );
        return foundServers;
    } catch (error: unknown) {
        cliLogger.trace(
            'Failed fetching servers from local graphql with "%s"',
            error instanceof Error ? error.message : 'Unknown Error'
        );
        return {
            online: [],
            offline: [],
            invalid: [],
        };
    }
};

const hashUrlRegex = () => /(.*)([a-z0-9]{40})(.*)/g;

export const anonymiseOrigins = (origins?: string[]): string[] => {
    const originsWithoutSocks = origins?.filter((url) => !url.endsWith('.sock')) ?? [];
    return originsWithoutSocks
        .map((origin) =>
            origin
                // Replace 40 char hash string with "HASH"
                .replace(hashUrlRegex(), '$1HASH$3')
                // Replace ipv4 address using . separator with "IPV4ADDRESS"
                .replace(ipRegex(), 'IPV4ADDRESS')
                // Replace ipv4 address using - separator with "IPV4ADDRESS"
                .replace(new RegExp(ipRegex().toString().replace('\\.', '-')), '/IPV4ADDRESS')
                // Replace the configured WAN port with "WANPORT"
                .replace(`:${getters.config().remote.wanport || 443}`, ':WANPORT')
        )
        .filter(Boolean);
};
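Illustrative input/output for anonymiseOrigins, using invented values and assuming remote.wanport is '8443':

anonymiseOrigins([
    'https://192.168.1.150:8443',
    'https://0123456789abcdef0123456789abcdef01234567.unraid.net:8443',
    '/var/run/unraid-api.sock',
]);
// => ['https://IPV4ADDRESS:WANPORT', 'https://HASH.unraid.net:WANPORT']
// The .sock entry is filtered out before any replacement runs.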
const getAllowedOrigins = (cloud: CloudQueryResult | null, v: Verbosity): string[] | null => {
    switch (v) {
        case '-vv':
            return cloud?.allowedOrigins.filter((url) => !url.endsWith('.sock')) ?? [];
        case '-v':
            return anonymiseOrigins(cloud?.allowedOrigins ?? []);
        default:
            return null;
    }
};

const getReadableCloudDetails = (reportObject: ReportObject, v: Verbosity): string => {
    const error = reportObject.cloud.error ? `\n	ERROR [${reportObject.cloud.error}]` : '';
    const status = reportObject.cloud.status ? reportObject.cloud.status : 'disconnected';
    const ip = reportObject.cloud.ip && v !== '' ? `\n	IP: [${reportObject.cloud.ip}]` : '';
    return `
	STATUS: [${status}] ${ip} ${error}`;
};

const getReadableMinigraphDetails = (reportObject: ReportObject): string => {
    const statusLine = `STATUS: [${reportObject.minigraph.status}]`;
    const errorLine = reportObject.minigraph.error ? `	ERROR: [${reportObject.minigraph.error}]` : null;
    const timeoutLine = reportObject.minigraph.timeout
        ? `	TIMEOUT: [${(reportObject.minigraph.timeout || 1) / 1_000}s]`
        : null; // 1 in case of divide by zero

    return `
	${statusLine}${errorLine ? `\n${errorLine}` : ''}${timeoutLine ? `\n${timeoutLine}` : ''}`;
};

// Convert server to string output
const serverToString = (v: Verbosity) => (server: ServersQueryResultServer) =>
    `${server?.name ?? 'No Server Name'}${
        v === '-v' || v === '-vv'
            ? `[owner="${server.owner?.username ?? 'No Owner Found'}"${
                  v === '-vv' ? ` guid="${server.guid ?? 'No GUID'}"]` : ']'
              }`
            : ''
    }`;
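Working through the template literal above for a hypothetical server named "Tower" owned by "alice" with guid "abc-123" (values invented for illustration):

// serverToString('')(server)    -> 'Tower'
// serverToString('-v')(server)  -> 'Tower[owner="alice"]'
// serverToString('-vv')(server) -> 'Tower[owner="alice" guid="abc-123"]'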
const getReadableServerDetails = (reportObject: ReportObject, v: Verbosity): string => {
    if (!reportObject.servers) {
        return '';
    }

    if (reportObject.api.status === 'stopped') {
        return '\nSERVERS: API is offline';
    }

    const invalid =
        (v === '-v' || v === '-vv') && reportObject.servers.invalid.length > 0
            ? `
	INVALID: ${reportObject.servers.invalid.map(serverToString(v)).join(',')}`
            : '';

    return `
SERVERS:
	ONLINE: ${reportObject.servers.online.map(serverToString(v)).join(',')}
	OFFLINE: ${reportObject.servers.offline.map(serverToString(v)).join(',')}${invalid}`;
};

const getReadableAllowedOrigins = (reportObject: ReportObject): string => {
    const { cloud } = reportObject;
    if (cloud?.allowedOrigins) {
        return `
ALLOWED_ORIGINS: ${cloud.allowedOrigins.join(', ').trim()}`;
    }

    return '';
};

const getVerbosity = (argv: string[]): Verbosity => {
    if (argv.includes('-v')) {
        return '-v';
    }

    if (argv.includes('-vv')) {
        return '-vv';
    }

    return '';
};

export const report = async (...argv: string[]) => {
    // Check if the user has raw output enabled
    const rawOutput = argv.includes('--raw');

    // Check if we have a tty attached to stdout
    // If we don't then this is being piped to a log file, etc.
    const hasTty = process.stdout.isTTY;

    // Check if we should show interactive logs
    // If this has a tty it's interactive
    // AND
    // If they don't have --raw
    const isInteractive = hasTty && !rawOutput;

    const stdoutLogger = readLine.createInterface({
        input: process.stdin,
        output: process.stdout,
    });

    try {
        setEnv('LOG_TYPE', 'raw');

        // Show loading message
        if (isInteractive) {
            stdoutLogger.write('Generating report please wait…');
        }

        const jsonReport = argv.includes('--json');
        const v = getVerbosity(argv);

        // Find all processes called "unraid-api" which aren't this process
        const unraidApiRunning = await isUnraidApiRunning();

        // Load my servers config file into store
        await store.dispatch(loadConfigFile());
        await store.dispatch(loadStateFiles());

        const { config, emhttp } = store.getState();
        if (!config.upc.apikey) throw new Error('Missing UPC API key');

        const client = getApiApolloClient({ upcApiKey: config.upc.apikey });
        // Fetch the cloud endpoint
        const cloud = await getCloudData(client);

        // Log cloud response
        cliLogger.trace('Cloud response %s', JSON.stringify(cloud, null, 0));

        // Query local graphql using upc's API key
        // Get the servers array
        const servers = await getServersData({ client, v });

        // Check if the API key is valid
        const isApiKeyValid = cloud?.apiKey.valid ?? false;

        const reportObject: ReportObject = {
            os: {
                serverName: emhttp.var.name,
                version: emhttp.var.version,
            },
            api: {
                version: API_VERSION,
                status: unraidApiRunning ? 'running' : 'stopped',
                environment: process.env.ENVIRONMENT ?? 'THIS_WILL_BE_REPLACED_WHEN_BUILT',
                nodeVersion: process.version,
            },
            apiKey: isApiKeyValid ? 'valid' : cloud?.apiKey.error ?? 'invalid',
            ...(servers ? { servers } : {}),
            myServers: {
                status: config?.remote?.username ? 'authenticated' : 'signed out',
                ...(config?.remote?.username
                    ? {
                          myServersUsername: config?.remote?.username?.includes('@')
                              ? 'REDACTED'
                              : config?.remote.username,
                      }
                    : {}),
            },
            minigraph: {
                status: cloud?.minigraphql.status ?? MinigraphStatus.PRE_INIT,
                timeout: cloud?.minigraphql.timeout ?? null,
                error:
                    cloud?.minigraphql.error ?? !cloud?.minigraphql.status ? 'API Disconnected' : null,
            },
            cloud: {
                status: cloud?.cloud.status ?? 'error',
                ...(cloud?.cloud.error ? { error: cloud.cloud.error } : {}),
                ...(cloud?.cloud.status === 'ok' ? { ip: cloud.cloud.ip ?? 'NO_IP' } : {}),
                ...(getAllowedOrigins(cloud, v) ? { allowedOrigins: getAllowedOrigins(cloud, v) } : {}),
            },
        };

        // If we have trace logs or the user selected --raw don't clear the screen
        if (process.env.LOG_LEVEL !== 'trace' && isInteractive) {
            // Clear the original log about the report being generated
            readLine.cursorTo(process.stdout, 0, 0);
            readLine.clearScreenDown(process.stdout);
        }

        if (jsonReport) {
            stdout.write(JSON.stringify(reportObject) + '\n');
            stdoutLogger.close();
            return reportObject;
        } else {
            // Generate the actual report
            const report = `
<-----UNRAID-API-REPORT----->
SERVER_NAME: ${reportObject.os.serverName}
ENVIRONMENT: ${reportObject.api.environment}
UNRAID_VERSION: ${reportObject.os.version}
UNRAID_API_VERSION: ${reportObject.api.version}
UNRAID_API_STATUS: ${reportObject.api.status}
API_KEY: ${reportObject.apiKey}
MY_SERVERS: ${reportObject.myServers.status}${
                reportObject.myServers.myServersUsername
                    ? `\nMY_SERVERS_USERNAME: ${reportObject.myServers.myServersUsername}`
                    : ''
            }
CLOUD: ${getReadableCloudDetails(reportObject, v)}
MINI-GRAPH: ${getReadableMinigraphDetails(reportObject)}${getReadableServerDetails(
                reportObject,
                v
            )}${getReadableAllowedOrigins(reportObject)}
</----UNRAID-API-REPORT----->
`;

            stdout.write(report);
            stdoutLogger.close();
            return report;
        }
    } catch (error: unknown) {
        console.log({ error });
        if (error instanceof Error) {
            cliLogger.trace(error);
            stdoutLogger.write(`\nFailed generating report with "${error.message}"\n`);
            return;
        }

        stdout.write(`${error as string}`);
        stdoutLogger.close();
    }
};
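An in-process invocation matching the flag handling above ('--json' selects machine-readable output, '-v'/'-vv' raise verbosity, '--raw' skips the interactive screen clearing):

const jsonReport = await report('--json', '-v');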
10 api/src/cli/commands/restart.ts Normal file
@@ -0,0 +1,10 @@
import { start } from '@app/cli/commands/start';
import { stop } from '@app/cli/commands/stop';

/**
 * Stop a running API process and then start it again.
 */
export const restart = async () => {
    await stop();
    await start();
};
16 api/src/cli/commands/start.ts Normal file
@@ -0,0 +1,16 @@
import { PM2_PATH } from '@app/consts';
import { cliLogger } from '@app/core/log';
import { execSync } from 'child_process';
import { join } from 'node:path';

/**
 * Start a new API process.
 */
export const start = async () => {
    cliLogger.info('Starting unraid-api with command', `${PM2_PATH} start ${join(import.meta.dirname, 'ecosystem.config.json')} --update-env`);

    execSync(`${PM2_PATH} start ${join(import.meta.dirname, '../../', 'ecosystem.config.json')} --update-env`, {
        env: process.env,
        stdio: 'inherit',
        cwd: process.cwd(),
    });
};
7 api/src/cli/commands/status.ts Normal file
@@ -0,0 +1,7 @@
import { PM2_PATH } from '@app/consts';
import { execSync } from 'child_process';

export const status = async () => {
    execSync(`${PM2_PATH} status unraid-api`, { stdio: 'inherit' });
    process.exit(0);
};
6 api/src/cli/commands/stop.ts Normal file
@@ -0,0 +1,6 @@
import { PM2_PATH } from '@app/consts';
import { execSync } from 'child_process';

export const stop = async () => {
    execSync(`${PM2_PATH} stop unraid-api`, { stdio: 'inherit' });
};
64
api/src/cli/commands/switch-env.ts
Normal file
64
api/src/cli/commands/switch-env.ts
Normal file
@@ -0,0 +1,64 @@
import { copyFile, readFile, writeFile } from 'fs/promises';
import { join } from 'path';
import { cliLogger } from '@app/core/log';
import { getters } from '@app/store';
import { start } from '@app/cli/commands/start';
import { stop } from '@app/cli/commands/stop';

export const switchEnv = async () => {
    const paths = getters.paths();
    const basePath = paths['unraid-api-base'];
    const envFlashFilePath = paths['myservers-env'];
    const envFile = await readFile(envFlashFilePath, 'utf-8').catch(() => '');

    await stop();

    cliLogger.debug(
        'Checking %s for current ENV, found %s',
        envFlashFilePath,
        envFile
    );

    // Match a line like env="production": [0] = full match, [1] = key ("env"), [2] = value ("production")
    const matchArray = /([a-zA-Z]+)=["]*([a-zA-Z]+)["]*/.exec(envFile);
    // Take the value at index 2 of the match, or leave it undefined
    const [, , currentEnvInFile] =
        matchArray && matchArray.length === 3 ? matchArray : [];

    let newEnv = 'production';

    // Switch from staging to production
    if (currentEnvInFile === 'staging') {
        newEnv = 'production';
    }

    // Switch from production to staging
    if (currentEnvInFile === 'production') {
        newEnv = 'staging';
    }

    if (currentEnvInFile) {
        cliLogger.debug(
            'Switching from "%s" to "%s"...',
            currentEnvInFile,
            newEnv
        );
    } else {
        cliLogger.debug('No ENV found, setting env to "production"...');
    }

    // Write the new env to the flash file
    const newEnvLine = `env="${newEnv}"`;
    await writeFile(envFlashFilePath, newEnvLine);
    cliLogger.debug('Writing %s to %s', newEnvLine, envFlashFilePath);

    // Copy the matching env file over to the live location before restarting
    const source = join(basePath, `.env.${newEnv}`);
    const destination = join(basePath, '.env');

    cliLogger.debug('Copying %s to %s', source, destination);
    await copyFile(source, destination);

    cliLogger.info('Now using %s', newEnv);
    await start();
};
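The match-array destructuring above is easiest to see with a concrete input; a minimal sketch (the sample line is an assumption):

    // For the line env="staging", the regex yields:
    //   [0] = 'env="staging"', [1] = 'env', [2] = 'staging'
    const matchArray = /([a-zA-Z]+)=["]*([a-zA-Z]+)["]*/.exec('env="staging"');
    const [, , currentEnvInFile] = matchArray && matchArray.length === 3 ? matchArray : [];
    // currentEnvInFile === 'staging', so switchEnv would flip the env to "production"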
53
api/src/cli/index.ts
Executable file
@@ -0,0 +1,53 @@
import { parse } from 'ts-command-line-args';
import { cliLogger } from '@app/core/log';
import { type Flags, mainOptions, options, args } from '@app/cli/options';
import { setEnv } from '@app/cli/set-env';
import { getters } from '@app/store';
import { execSync } from 'child_process';
import { PM2_PATH } from '@app/consts';
import * as ENVIRONMENT from '@app/environment';

const command = mainOptions.command as unknown as string;

export const main = async (...argv: string[]) => {
    // Set envs
    cliLogger.debug({ paths: getters.paths(), environment: ENVIRONMENT }, 'Starting CLI');

    setEnv('PORT', process.env.PORT ?? mainOptions.port ?? '9000');

    if (!command) {
        // Run the help command
        parse<Flags>(args, {
            ...options,
            partial: true,
            stopAtFirstUnknown: true,
            argv: ['-h'],
        });
    }

    // Only import the command we need when we use it
    const commands = {
        start: import('@app/cli/commands/start').then((pkg) => pkg.start),
        stop: import('@app/cli/commands/stop').then((pkg) => pkg.stop),
        restart: import('@app/cli/commands/restart').then((pkg) => pkg.restart),
        logs: async () => execSync(`${PM2_PATH} logs unraid-api --lines 200`, { stdio: 'inherit' }),
        'switch-env': import('@app/cli/commands/switch-env').then((pkg) => pkg.switchEnv),
        version: import('@app/cli/commands/version').then((pkg) => pkg.version),
        status: import('@app/cli/commands/status').then((pkg) => pkg.status),
        report: import('@app/cli/commands/report').then((pkg) => pkg.report),
        'validate-token': import('@app/cli/commands/validate-token').then((pkg) => pkg.validateToken),
    };

    // Unknown command
    if (!Object.keys(commands).includes(command)) {
        throw new Error(`Invalid command "${command}"`);
    }

    // Resolve the command import
    const commandMethod = await commands[command];

    // Run the command
    await commandMethod(...argv);

    process.exit(0);
};
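The command map lazy-loads each subcommand module, so a run pays the import cost of only the command it actually executes. A hedged invocation sketch (the bin entrypoint wiring is an assumption; this diff only exports `main`):

    // Hypothetical CLI entrypoint delegating to main(); the argv shape is assumed.
    import { main } from '@app/cli';

    await main(...process.argv.slice(2)); // e.g. `unraid-api switch-env`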
Some files were not shown because too many files have changed in this diff.