mirror of https://github.com/unraid/api.git
synced 2026-01-02 14:40:01 -06:00

Compare commits
52 Commits (Author and Date columns were not captured in this mirror view)

| SHA1 |
|---|
| c5a394eddf |
| 1d30b25a0a |
| 026b0b344c |
| 72860e71fe |
| 1236b7743e |
| ea77de8800 |
| ad0f4c8b55 |
| d74d9f1246 |
| 45ecab6914 |
| e65775f878 |
| 33ad1fd63b |
| 234573264c |
| f5724abffb |
| 8bb9efcb68 |
| 97ab6fbe32 |
| 36a7a28ed5 |
| 03be042410 |
| cd323acd49 |
| 457d338150 |
| 04caaf3b25 |
| 4b5743906a |
| f65788aa94 |
| 8a5b23856c |
| 23c60dad0c |
| 1bbe7d27b0 |
| bc3ca92fb0 |
| c4fdff8149 |
| 0e008aaf1e |
| da8dac3940 |
| 187a6ec670 |
| 819ba0bd71 |
| 1217d0b100 |
| 15dc458751 |
| a151dc1f73 |
| 006fe1e762 |
| 7e89cd2a3e |
| 38a9e36fcd |
| d701151729 |
| 0f682b5f23 |
| 3fff76d155 |
| d8d7075ed8 |
| e27e38645f |
| 4bb00dd981 |
| 60f16bde41 |
| cacb1c1d3d |
| a562f77163 |
| 37e72f9729 |
| 662d5f64c9 |
| 79397eecff |
| 99d8b31fa8 |
| d0827df60e |
| 9568aabd17 |
11 .cursor/rules/api-rules.mdc (new file)

@@ -0,0 +1,11 @@
---
description:
globs: api/*
alwaysApply: false
---

* pnpm ONLY
* always run scripts from api/package.json unless requested
* prefer adding new files to the NestJS app located at api/src/unraid-api/ instead of the legacy code
* Test suite is VITEST, do not use jest
* Prefer to not mock simple dependencies
224 .cursor/rules/web-testing-rules.mdc (new file)

@@ -0,0 +1,224 @@
---
description:
globs: **/*.test.ts,**/__test__/components/**/*.ts,**/__test__/store/**/*.ts,**/__test__/mocks/**/*.ts
alwaysApply: false
---

## Vue Component Testing Best Practices

- This is a Nuxt.js app, but we test with Vitest outside of the Nuxt environment
- Nuxt is currently set to auto-import, so some Vue files may need `computed` or `ref` imported
- Use pnpm when running terminal commands and stay within the web directory.
- Tests live under `web/test`; to run them, just run `pnpm test`

### Setup

- Use `mount` from Vue Test Utils for component testing
- Stub complex child components that aren't the focus of the test
- Mock external dependencies and services

```typescript
import { mount } from '@vue/test-utils';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import YourComponent from '~/components/YourComponent.vue';

// Mock dependencies
vi.mock('~/helpers/someHelper', () => ({
  SOME_CONSTANT: 'mocked-value',
}));

describe('YourComponent', () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('renders correctly', () => {
    const wrapper = mount(YourComponent, {
      global: {
        stubs: {
          // Stub child components when needed
          ChildComponent: true,
        },
      },
    });

    // Assertions
    expect(wrapper.text()).toContain('Expected content');
  });
});
```

### Testing Patterns

- Test component behavior and output, not implementation details
- Verify that the expected elements are rendered
- Test component interactions (clicks, inputs, etc.)
- Check for expected prop handling and event emissions

### Finding Elements

- Use semantic queries like `find('button')` or `find('[data-test="id"]')`, but prefer not to use data-test IDs
- Find components with `findComponent(ComponentName)`
- Use `findAll` to check for multiple elements (sketch below)
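
A minimal, self-contained sketch of these queries; the inline `ChildItem` and `ListComponent` stand in for real components you would normally import:

```typescript
import { mount } from '@vue/test-utils';
import { describe, expect, it } from 'vitest';
import { defineComponent, h } from 'vue';

// Inline components keep the sketch runnable without the runtime template compiler
const ChildItem = defineComponent({
  setup(_, { slots }) {
    return () => h('li', { class: 'item' }, slots.default?.());
  },
});

const ListComponent = defineComponent({
  setup() {
    return () =>
      h('div', [
        h('button', { type: 'button' }, 'Refresh'),
        h('ul', [h(ChildItem, () => 'one'), h(ChildItem, () => 'two')]),
      ]);
  },
});

describe('finding elements', () => {
  it('locates elements and components', () => {
    const wrapper = mount(ListComponent);

    // Semantic query by tag, no data-test id needed
    expect(wrapper.find('button').exists()).toBe(true);

    // Locate a child by its component definition
    expect(wrapper.findComponent(ChildItem).exists()).toBe(true);

    // findAll returns every match
    expect(wrapper.findAll('li')).toHaveLength(2);
  });
});
```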

### Assertions

- Assert on rendered text content with `wrapper.text()`
- Assert on element attributes with `element.attributes()`
- Verify element existence with `expect(element.exists()).toBe(true)`
- Check component state through rendered output

### Component Interaction

- Trigger events with `await element.trigger('click')`
- Set input values with `await input.setValue('value')`
- Test emitted events with `wrapper.emitted()` (see the sketch below)
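
A short sketch tying these together; the inline `FormComponent` is a hypothetical stand-in that emits `submit` with the typed value:

```typescript
import { mount } from '@vue/test-utils';
import { describe, expect, it } from 'vitest';
import { defineComponent, h, ref } from 'vue';

// Stand-in: an input bound to local state, plus a button that emits `submit`
const FormComponent = defineComponent({
  emits: ['submit'],
  setup(_, { emit }) {
    const text = ref('');
    return () =>
      h('div', [
        h('input', {
          value: text.value,
          onInput: (e: Event) => (text.value = (e.target as HTMLInputElement).value),
        }),
        h('button', { type: 'button', onClick: () => emit('submit', text.value) }, 'Send'),
      ]);
  },
});

describe('component interaction', () => {
  it('emits the typed value when the button is clicked', async () => {
    const wrapper = mount(FormComponent);

    // Both helpers return promises; await them so the DOM settles
    await wrapper.find('input').setValue('hello');
    await wrapper.find('button').trigger('click');

    // emitted() maps event names to arrays of emitted argument lists
    expect(wrapper.emitted('submit')).toEqual([['hello']]);
  });
});
```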

### Mocking

- Mock external services and API calls
- Prefer not using mocks whenever possible
- Use `vi.mock()` for module-level mocks
- Specify return values for component methods with `vi.spyOn()` (example below)
- Reset mocks between tests with `vi.clearAllMocks()`
- Frequently used mocks are stored under `web/test/mocks`
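
A minimal `vi.spyOn()` example; `statusService` is an illustrative object, but the same pattern applies to store or service methods:

```typescript
import { afterEach, describe, expect, it, vi } from 'vitest';

// Stand-in service object; in real tests this could be a store or helper module
const statusService = {
  async fetchStatus(): Promise<string> {
    return 'real-value';
  },
};

describe('vi.spyOn', () => {
  afterEach(() => {
    // Restore original implementations between tests
    vi.restoreAllMocks();
  });

  it('overrides the return value and records calls', async () => {
    const spy = vi.spyOn(statusService, 'fetchStatus').mockResolvedValue('ok');

    await expect(statusService.fetchStatus()).resolves.toBe('ok');
    expect(spy).toHaveBeenCalledTimes(1);
  });
});
```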

### Async Testing

- Use `await nextTick()` for DOM updates
- Use `flushPromises()` for more complex promise chains
- Always await async operations before making assertions; for example:
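
A self-contained sketch; `AsyncStatus` stands in for a component that resolves data after mount:

```typescript
import { flushPromises, mount } from '@vue/test-utils';
import { describe, expect, it } from 'vitest';
import { defineComponent, h, nextTick, ref } from 'vue';

// Stand-in component: mounts in a "loading" state, then a promise
// (a fake API call) resolves and updates the label
const AsyncStatus = defineComponent({
  setup() {
    const label = ref('loading');
    Promise.resolve('Loaded').then((value) => {
      label.value = value;
    });
    return () => h('p', label.value);
  },
});

describe('async updates', () => {
  it('asserts only after promises and DOM updates settle', async () => {
    const wrapper = mount(AsyncStatus);

    // Still loading: the promise callback has not run yet
    expect(wrapper.text()).toBe('loading');

    // flushPromises() drains the resolved promise chain...
    await flushPromises();
    // ...and nextTick() flushes any remaining DOM update queue
    await nextTick();

    expect(wrapper.text()).toBe('Loaded');
  });
});
```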

## Store Testing with Pinia

### Basic Setup

```typescript
import { createPinia, setActivePinia } from 'pinia';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { useYourStore } from '~/store/your-store';

// Mock declarations must be at top level due to hoisting
const mockDependencyFn = vi.fn();

// Module mocks must use factory functions
vi.mock('~/store/dependency', () => ({
  useDependencyStore: () => ({
    someMethod: mockDependencyFn,
    someProperty: 'mockValue',
  }),
}));

describe('Your Store', () => {
  let store: ReturnType<typeof useYourStore>;

  beforeEach(() => {
    setActivePinia(createPinia());
    store = useYourStore();
    vi.clearAllMocks();
  });

  afterEach(() => {
    vi.resetAllMocks();
  });

  it('tests some action', () => {
    store.someAction();
    expect(mockDependencyFn).toHaveBeenCalled();
  });
});
```

### Important Guidelines

1. **Store Initialization**
   - Use `createPinia()` instead of `createTestingPinia()` for most cases
   - Only use `createTestingPinia` if you specifically need its testing features
   - Let stores initialize with their natural default state instead of forcing initial state
   - Do not mock the store we're actually testing in the test file. That's why we're using `createPinia()`

2. **Vue Reactivity**
   - Ensure Vue reactivity imports are added to the original store files, as they may be missing because Nuxt auto-import was turned on
   - Don't rely on Nuxt auto-imports in tests

   ```typescript
   // Required in store files, even with Nuxt auto-imports
   import { computed, ref, watchEffect } from 'vue';
   ```

3. **Mocking Best Practices**
   - Place all mock declarations at the top level
   - Use factory functions for module mocks to avoid hoisting issues

   ```typescript
   // ❌ Wrong - will cause hoisting issues
   const mockFn = vi.fn();
   vi.mock('module', () => ({ method: mockFn }));

   // ✅ Correct - using factory function
   vi.mock('module', () => {
     const mockFn = vi.fn();
     return { method: mockFn };
   });
   ```

4. **Testing Actions**
   - Test action side effects and state changes
   - Verify actions are called with correct parameters
   - Mock external dependencies appropriately

   ```typescript
   it('should handle action correctly', () => {
     store.yourAction();
     expect(mockDependencyFn).toHaveBeenCalledWith(expectedArg1, expectedArg2);
     expect(store.someState).toBe(expectedValue);
   });
   ```

5. **Common Pitfalls**
   - Don't mix mock declarations and module mocks incorrectly
   - Avoid relying on Nuxt's auto-imports in the test environment
   - Clear mocks between tests to ensure isolation
   - Remember that `vi.mock()` calls are hoisted

### Testing State & Getters

- Test computed properties by accessing them directly
- Verify state changes after actions
- Test that getter dependencies are properly mocked

```typescript
it('computes derived state correctly', () => {
  store.setState('new value');
  expect(store.computedValue).toBe('expected result');
});
```

### Testing Complex Interactions

- Test store interactions with other stores
- Verify proper error handling
- Test async operations completely

```typescript
it('handles async operations', async () => {
  const promise = store.asyncAction();
  expect(store.status).toBe('loading');
  await promise;
  expect(store.status).toBe('success');
});
```

### Testing Actions

- Verify actions are called with the right parameters
- Test action side effects if not stubbed
- Override specific action implementations when needed

```typescript
// Test action calls
store.yourAction(params);
expect(store.yourAction).toHaveBeenCalledWith(params);

// Test with real implementation
const pinia = createTestingPinia({
  createSpy: vi.fn,
  stubActions: false,
});
```

### Testing State & Getters

- Set initial state for focused testing (see the sketch below)
- Test computed properties by accessing them directly
- Verify state changes by updating the store
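
A minimal sketch of seeding initial state with `createTestingPinia`; the `counter` store here is hypothetical, defined inline so the example runs on its own:

```typescript
import { createTestingPinia } from '@pinia/testing';
import { defineStore } from 'pinia';
import { describe, expect, it, vi } from 'vitest';

// Stand-in store; real tests would import one from ~/store
const useCounterStore = defineStore('counter', {
  state: () => ({ count: 0 }),
  getters: {
    doubled: (state) => state.count * 2,
  },
});

describe('state & getters with createTestingPinia', () => {
  it('computes getters from seeded state', () => {
    // Seed only the slice under test; initialState keys are store ids
    createTestingPinia({
      createSpy: vi.fn,
      initialState: { counter: { count: 2 } },
    });

    const store = useCounterStore();
    expect(store.count).toBe(2);
    expect(store.doubled).toBe(4);

    // Updating state is reflected in getters immediately
    store.count = 5;
    expect(store.doubled).toBe(10);
  });
});
```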

190 .github/workflows/build-plugin.yml (vendored, new file)

@@ -0,0 +1,190 @@
name: Build Plugin Component

on:
  workflow_call:
    inputs:
      RELEASE_CREATED:
        type: string
        required: true
        description: "Whether a release was created"
      RELEASE_TAG:
        type: string
        required: false
        description: "Name of the tag when a release is created"
      TAG:
        type: string
        required: false
        description: "Tag for the build (e.g. PR number or version)"
      BUCKET_PATH:
        type: string
        required: true
        description: "Path in the bucket where artifacts should be stored"
      BASE_URL:
        type: string
        required: true
        description: "Base URL for the plugin builds"
    secrets:
      CF_ACCESS_KEY_ID:
        required: true
      CF_SECRET_ACCESS_KEY:
        required: true
      CF_BUCKET_PREVIEW:
        required: true
      CF_ENDPOINT:
        required: true

jobs:
  build-plugin:
    name: Build and Deploy Plugin
    defaults:
      run:
        working-directory: plugin
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install Node
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - name: Get API Version
        id: vars
        run: |
          GIT_SHA=$(git rev-parse --short HEAD)
          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
          echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/connect-plugin

      - name: Download Unraid UI Components
        uses: actions/download-artifact@v4
        with:
          name: unraid-wc-ui
          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
          merge-multiple: true

      - name: Download Unraid Web Components
        uses: actions/download-artifact@v4
        with:
          pattern: unraid-wc-rich
          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
          merge-multiple: true

      - name: Download Unraid API
        uses: actions/download-artifact@v4
        with:
          name: unraid-api
          path: ${{ github.workspace }}/plugin/api/

      - name: Download PNPM Store
        uses: actions/download-artifact@v4
        with:
          name: packed-node-modules
          path: ${{ github.workspace }}/plugin/

      - name: Extract Unraid API
        run: |
          mkdir -p ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api
          tar -xzf ${{ github.workspace }}/plugin/api/unraid-api.tgz -C ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api

      - name: Build Plugin and TXZ Based on Event and Tag
        id: build-plugin
        run: |
          cd ${{ github.workspace }}/plugin
          ls -al
          pnpm run build:txz
          pnpm run build:plugin --tag="${{ inputs.TAG }}" --base-url="${{ inputs.BASE_URL }}"

      - name: Ensure Plugin Files Exist
        run: |
          ls -al ./deploy
          if [ ! -f ./deploy/*.plg ]; then
            echo "Error: .plg file not found in plugin/deploy/"
            exit 1
          fi

          if [ ! -f ./deploy/*.txz ]; then
            echo "Error: .txz file not found in plugin/deploy/"
            exit 1
          fi

          if [ ! -f ./deploy/*.tar.xz ]; then
            echo "Error: .tar.xz file not found in plugin/deploy/"
            exit 1
          fi

      - name: Upload to GHA
        uses: actions/upload-artifact@v4
        with:
          name: unraid-plugin-${{ github.run_id }}-${{ inputs.RELEASE_TAG }}
          path: plugin/deploy/

      - name: Upload Release Assets
        if: inputs.RELEASE_CREATED == 'true'
        env:
          GITHUB_TOKEN: ${{ github.token }}
          RELEASE_TAG: ${{ inputs.RELEASE_TAG }}
        run: |
          # For each file in release directory
          for file in deploy/*; do
            echo "Uploading $file to release..."
            gh release upload "${RELEASE_TAG}" "$file" --clobber
          done

      - name: Workflow Dispatch and wait
        if: inputs.RELEASE_CREATED == 'true'
        uses: the-actions-org/workflow-dispatch@v4.0.0
        with:
          workflow: release-production.yml
          inputs: '{ "version": "${{ steps.vars.outputs.API_VERSION }}" }'
          token: ${{ secrets.WORKFLOW_TRIGGER_PAT }}

      - name: Upload to Cloudflare
        if: inputs.RELEASE_CREATED == 'false'
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: auto
        run: |
          # Sync the deploy directory to the Cloudflare bucket with explicit content encoding and public-read ACL
          aws s3 sync deploy/ s3://${{ secrets.CF_BUCKET_PREVIEW }}/${{ inputs.BUCKET_PATH }} \
            --endpoint-url ${{ secrets.CF_ENDPOINT }} \
            --checksum-algorithm CRC32 \
            --no-guess-mime-type \
            --content-encoding none \
            --acl public-read

      - name: Comment URL
        if: github.event_name == 'pull_request'
        uses: thollander/actions-comment-pull-request@v3
        with:
          comment-tag: prlink
          mode: recreate
          message: |
            This plugin has been deployed to Cloudflare R2 and is available for testing.
            Download it at this URL:
            ```
            ${{ inputs.BASE_URL }}/tag/${{ inputs.TAG }}/dynamix.unraid.net.plg
            ```
258 .github/workflows/main.yml (vendored)

@@ -13,7 +13,6 @@ concurrency:
 jobs:
   release-please:
     name: Release Please
-    # Only run release-please on pushes to main
     runs-on: ubuntu-latest
     permissions:
       contents: write
@@ -21,6 +20,7 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v4
+        # Only run release-please on pushes to main
+        if: github.event_name == 'push' && github.ref == 'refs/heads/main'

       - id: release
@@ -47,7 +47,7 @@ jobs:
       - name: Cache APT Packages
         uses: awalsh128/cache-apt-pkgs-action@v1.4.3
         with:
-          packages: bash procps python3 libvirt-dev jq zstd git build-essential
+          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
           version: 1.0

       - name: Install pnpm
@@ -72,6 +72,45 @@ jobs:
       - name: PNPM Install
         run: pnpm install --frozen-lockfile

+      - name: Setup libvirt
+        run: |
+          # Create required groups (if they don't already exist)
+          sudo groupadd -f libvirt
+          sudo groupadd -f kvm
+
+          # Create libvirt user if not present, and add it to the kvm group
+          sudo useradd -m -s /bin/bash -g libvirt libvirt || true
+          sudo usermod -aG kvm libvirt || true
+
+          # Set up libvirt directories and permissions
+          sudo mkdir -p /var/run/libvirt /var/log/libvirt /etc/libvirt
+          sudo chown root:libvirt /var/run/libvirt /var/log/libvirt
+          sudo chmod g+w /var/run/libvirt /var/log/libvirt
+
+          # Configure libvirt by appending required settings
+          sudo tee -a /etc/libvirt/libvirtd.conf > /dev/null <<EOF
+          unix_sock_group = "libvirt"
+          unix_sock_rw_perms = "0770"
+          auth_unix_rw = "none"
+          EOF
+
+          # Add the current user to libvirt and kvm groups (note: this change won't apply to the current session)
+          sudo usermod -aG libvirt,kvm $USER
+
+          sudo mkdir -p /var/run/libvirt
+          sudo chown root:libvirt /var/run/libvirt
+          sudo chmod 775 /var/run/libvirt
+
+          # Start libvirtd in the background
+          sudo /usr/sbin/libvirtd --daemon
+
+          # Wait a bit longer for libvirtd to start
+          sleep 5
+
+          # Verify libvirt is running using sudo to bypass group membership delays
+          sudo virsh list --all || true
+
       - name: Lint
         run: pnpm run lint

@@ -152,11 +191,11 @@ jobs:
         with:
           name: unraid-api
           path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
-      - name: Upload PNPM Store to Github artifacts
+      - name: Upload Node Modules to Github artifacts
        uses: actions/upload-artifact@v4
         with:
-          name: packed-pnpm-store
-          path: ${{ github.workspace }}/api/deploy/packed-pnpm-store.txz
+          name: packed-node-modules
+          path: ${{ github.workspace }}/api/deploy/packed-node-modules.tar.xz

   build-unraid-ui-webcomponents:
     name: Build Unraid UI Library (Webcomponent Version)
@@ -203,6 +242,9 @@ jobs:
           cd ${{ github.workspace }}
           pnpm install --frozen-lockfile --filter @unraid/ui

+      - name: Lint
+        run: pnpm run lint
+
       - name: Build
         run: pnpm run build:wc

@@ -284,7 +326,7 @@ jobs:
           name: unraid-wc-rich
           path: web/.nuxt/nuxt-custom-elements/dist/unraid-components

-  build-plugin:
+  build-plugin-staging-pr:
     name: Build and Deploy Plugin
     needs:
       - release-please
@@ -292,176 +334,36 @@ jobs:
       - build-web
       - build-unraid-ui-webcomponents
       - test-api
-    defaults:
-      run:
-        working-directory: plugin
-    runs-on: ubuntu-latest
-    steps:
-      - name: Set Timezone
-        uses: szenius/set-timezone@v2.0
-        with:
-          timezoneLinux: "America/Los_Angeles"
-      - name: Checkout repo
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
+    uses: ./.github/workflows/build-plugin.yml
+    with:
+      RELEASE_CREATED: false
+      TAG: ${{ github.event.pull_request.number && format('PR{0}', github.event.pull_request.number) || '' }}
+      BUCKET_PATH: ${{ github.event.pull_request.number && format('unraid-api/tag/PR{0}', github.event.pull_request.number) || 'unraid-api' }}
+      BASE_URL: "https://preview.dl.unraid.net/unraid-api"
+    secrets:
+      CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
+      CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
+      CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
+      CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}

-      - name: Install Node
-        uses: actions/setup-node@v4
-        with:
-          node-version-file: ".nvmrc"
-
-      - uses: pnpm/action-setup@v4
-        name: Install pnpm
-        with:
-          run_install: false
-
-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
-
-      - name: Get API Version
-        id: vars
-        run: |
-          GIT_SHA=$(git rev-parse --short HEAD)
-          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
-          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
-          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
-          echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT
-
-      - uses: actions/cache@v4
-        name: Setup pnpm cache
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
-
-      - name: Install dependencies
-        run: |
-          cd ${{ github.workspace }}
-          pnpm install --frozen-lockfile --filter @unraid/connect-plugin
-
-      - name: Download Unraid UI Components
-        uses: actions/download-artifact@v4
-        with:
-          name: unraid-wc-ui
-          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
-          merge-multiple: true
-      - name: Download Unraid Web Components
-        uses: actions/download-artifact@v4
-        with:
-          pattern: unraid-wc-rich
-          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
-          merge-multiple: true
-      - name: Download Unraid API
-        uses: actions/download-artifact@v4
-        with:
-          name: unraid-api
-          path: ${{ github.workspace }}/plugin/api/
-      - name: Download PNPM Store
-        uses: actions/download-artifact@v4
-        with:
-          name: packed-pnpm-store
-          path: ${{ github.workspace }}/plugin/
-      - name: Extract Unraid API
-        run: |
-          mkdir -p ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api
-          tar -xzf ${{ github.workspace }}/plugin/api/unraid-api.tgz -C ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api
-      - name: Build Plugin and TXZ Based on Event and Tag
-        id: build-plugin
-        run: |
-          cd ${{ github.workspace }}/plugin
-          ls -al
-          pnpm run build:txz
-
-          if [ -n "${{ github.event.pull_request.number }}" ]; then
-            TAG="PR${{ github.event.pull_request.number }}"
-            BUCKET_PATH="unraid-api/tag/${TAG}"
-          else
-            TAG=""
-            BUCKET_PATH="unraid-api"
-          fi
-
-          # On release, build both prod and preview plugins
-          if [ "${{ needs.release-please.outputs.releases_created }}" == 'true' ]; then
-            BASE_URL="https://stable.dl.unraid.net/unraid-api"
-            pnpm run build:plugin --tag="${TAG}" --base-url="${BASE_URL}"
-            mv ./deploy ./deploy-prod
-          fi
-
-          BASE_URL="https://preview.dl.unraid.net/unraid-api"
-          echo "BUCKET_PATH=${BUCKET_PATH}" >> $GITHUB_OUTPUT
-          echo "TAG=${TAG}" >> $GITHUB_OUTPUT
-          pnpm run build:plugin --tag="${TAG}" --base-url="${BASE_URL}"
-      - name: Ensure Plugin Files Exist
-        run: |
-          ls -al ./deploy
-          if [ ! -f ./deploy/*.plg ]; then
-            echo "Error: .plg file not found in plugin/deploy/"
-            exit 1
-          fi
-
-          if [ ! -f ./deploy/*.txz ]; then
-            echo "Error: .txz file not found in plugin/deploy/"
-            exit 1
-          fi
-      - name: Ensure Production Plugin Files Exist
-        if: needs.release-please.outputs.releases_created == 'true'
-        run: |
-          ls -al ./deploy-prod
-          if [ ! -f ./deploy-prod/*.plg ]; then
-            echo "Error: .plg file not found in plugin/deploy-prod/"
-            exit 1
-          fi
-
-          if [ ! -f ./deploy-prod/*.txz ]; then
-            echo "Error: .txz file not found in plugin/deploy-prod/"
-            exit 1
-          fi
-      - name: Upload to GHA
-        uses: actions/upload-artifact@v4
-        with:
-          name: unraid-plugin
-          path: plugin/deploy/
-      - name: Upload to Cloudflare
-        if: github.event_name == 'pull_request' || startsWith(github.ref, 'refs/heads/main')
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
-          AWS_DEFAULT_REGION: auto
-        run: |
-          # Sync the deploy directory to the Cloudflare bucket with explicit content encoding and public-read ACL
-          aws s3 sync deploy/ s3://${{ secrets.CF_BUCKET_PREVIEW }}/${{ steps.build-plugin.outputs.BUCKET_PATH }} \
-            --endpoint-url ${{ secrets.CF_ENDPOINT }} \
-            --checksum-algorithm CRC32 \
-            --no-guess-mime-type \
-            --content-encoding none \
-            --acl public-read
-
-      - name: Upload Release Assets
-        if: needs.release-please.outputs.releases_created == 'true'
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          release_name=$(gh release list --repo ${{ github.repository }} --json name,isDraft --jq '.[] | select(.isDraft == true) | .name' | head -n 1)
-          # For each file in release directory
-          for file in deploy-prod/*; do
-            echo "Uploading $file to release..."
-            gh release upload "${release_name}" "$file" --clobber
-          done
-
-      - name: Comment URL
-        if: github.event_name == 'pull_request'
-        uses: thollander/actions-comment-pull-request@v3
-        with:
-          comment-tag: prlink
-          mode: recreate
-          message: |
-            This plugin has been deployed to Cloudflare R2 and is available for testing.
-            Download it at this URL:
-            ```
-            https://preview.dl.unraid.net/unraid-api/tag/${{ steps.build-plugin.outputs.tag }}/dynamix.unraid.net.plg
-            ```
+  build-plugin-production:
+    if: ${{ needs.release-please.outputs.releases_created == 'true' }}
+    name: Build and Deploy Production Plugin
+    needs:
+      - release-please
+      - build-api
+      - build-web
+      - build-unraid-ui-webcomponents
+      - test-api
+    uses: ./.github/workflows/build-plugin.yml
+    with:
+      RELEASE_CREATED: true
+      RELEASE_TAG: ${{ needs.release-please.outputs.tag_name }}
+      TAG: ""
+      BUCKET_PATH: unraid-api
+      BASE_URL: "https://stable.dl.unraid.net/unraid-api"
+    secrets:
+      CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
+      CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
+      CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
+      CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
17 .github/workflows/release-production.yml (vendored)

@@ -1,17 +1,14 @@
-name: Publish Release to Digital Ocean
+name: Publish Release

 on:
   release:
     types: [published]
   workflow_dispatch:
     inputs:
       version:
-        description: 'Tag to update'
+        description: 'Tag to release - will replace active release'
         required: true


 jobs:
-  publish-to-digital-ocean:
+  publish:
     runs-on: ubuntu-latest

     steps:
@@ -34,6 +31,8 @@ jobs:
       - uses: actions/setup-node@v4
         with:
           node-version: '22.x'
+      - run: |
+          echo '${{ steps.release-info.outputs.body }}' >> release-notes.txt
       - run: npm install html-escaper@2 xml2js
       - name: Update Plugin Changelog
         uses: actions/github-script@v7
@@ -41,7 +40,8 @@ jobs:
           script: |
             const fs = require('fs');
             const { escape } = require('html-escaper');
-            const releaseNotes = escape(`${{ steps.release-info.outputs.body }}`);
+
+            const releaseNotes = escape(fs.readFileSync('release-notes.txt', 'utf8'));

             if (!releaseNotes) {
               console.error('No release notes found');
@@ -92,6 +92,9 @@ jobs:
               process.exit(1);
             });

+      - name: Cleanup Inline Scripts
+        run: |
+          rm -rf node_modules/
       - name: Upload Release Files to DO Spaces
         env:
           AWS_ACCESS_KEY_ID: ${{ secrets.DO_ACCESS_KEY }}
5 .gitignore (vendored)

@@ -105,4 +105,7 @@ result-*
 web/scripts/.sync-webgui-repo-*

 # Activation code data
 plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/data/activation-data.php
+
+# Config file that changes between versions
+api/dev/Unraid.net/myservers.cfg
@@ -1 +1 @@
-{".":"4.6.0"}
+{".":"4.7.0"}
@@ -9,6 +9,8 @@ PATHS_MY_SERVERS_CONFIG=./dev/Unraid.net/myservers.cfg # My servers config file
 PATHS_MY_SERVERS_FB=./dev/Unraid.net/fb_keepalive # My servers flashbackup timekeeper file
 PATHS_KEYFILE_BASE=./dev/Unraid.net # Keyfile location
 PATHS_MACHINE_ID=./dev/data/machine-id
+PATHS_PARITY_CHECKS=./dev/states/parity-checks.log
+PATHS_CONFIG_MODULES=./dev/configs
 ENVIRONMENT="development"
 NODE_ENV="development"
 PORT="3001"
@@ -20,4 +22,4 @@ BYPASS_PERMISSION_CHECKS=false
 BYPASS_CORS_CHECKS=true
 CHOKIDAR_USEPOLLING=true
 LOG_TRANSPORT=console
-LOG_LEVEL=trace
+LOG_LEVEL=trace
@@ -2,3 +2,4 @@ ENVIRONMENT="production"
 NODE_ENV="production"
 PORT="/var/run/unraid-api.sock"
 MOTHERSHIP_GRAPHQL_LINK="https://mothership.unraid.net/ws"
+PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
@@ -2,3 +2,4 @@ ENVIRONMENT="staging"
 NODE_ENV="production"
 PORT="/var/run/unraid-api.sock"
 MOTHERSHIP_GRAPHQL_LINK="https://staging.mothership.unraid.net/ws"
+PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
@@ -9,5 +9,7 @@ PATHS_MY_SERVERS_CONFIG=./dev/Unraid.net/myservers.cfg # My servers config file
 PATHS_MY_SERVERS_FB=./dev/Unraid.net/fb_keepalive # My servers flashbackup timekeeper file
 PATHS_KEYFILE_BASE=./dev/Unraid.net # Keyfile location
 PATHS_MACHINE_ID=./dev/data/machine-id
+PATHS_PARITY_CHECKS=./dev/states/parity-checks.log
+PATHS_CONFIG_MODULES=./dev/configs
 PORT=5000
-NODE_ENV="test"
+NODE_ENV="test"
@@ -1,5 +1,72 @@
# Changelog

## [4.7.0](https://github.com/unraid/api/compare/v4.6.6...v4.7.0) (2025-04-24)

### Features

* add basic docker network listing ([#1317](https://github.com/unraid/api/issues/1317)) ([c4fdff8](https://github.com/unraid/api/commit/c4fdff8149eb2812707605b3a98eabc795d18c5e))
* add permission documentation by using a custom decorator ([#1355](https://github.com/unraid/api/issues/1355)) ([45ecab6](https://github.com/unraid/api/commit/45ecab6914e2e4dd48438352eb9a5084a6a4b996))
* basic vm controls ([#1293](https://github.com/unraid/api/issues/1293)) ([bc3ca92](https://github.com/unraid/api/commit/bc3ca92fb02387bc019bb001809df96974737b50))
* code first graphql ([#1347](https://github.com/unraid/api/issues/1347)) ([f5724ab](https://github.com/unraid/api/commit/f5724abffbcb8c8a4885c487df4119787fd1d541))

### Bug Fixes

* container names always null ([#1335](https://github.com/unraid/api/issues/1335)) ([8a5b238](https://github.com/unraid/api/commit/8a5b23856c006827229812e558f7d1af92be80e0))
* **deps:** update all non-major dependencies ([#1337](https://github.com/unraid/api/issues/1337)) ([2345732](https://github.com/unraid/api/commit/234573264cfed1409a767927ff95f132be393ea9))
* hide reboot notice for patch releases ([#1341](https://github.com/unraid/api/issues/1341)) ([4b57439](https://github.com/unraid/api/commit/4b5743906a172f84bb46011fe2c3e0c8f64059a2))
* move docker mutations to the mutations resolver ([#1333](https://github.com/unraid/api/issues/1333)) ([1bbe7d2](https://github.com/unraid/api/commit/1bbe7d27b0e87b5ffcd57ac9cc28e64b046055be))
* PR build issue ([457d338](https://github.com/unraid/api/commit/457d338150774ddc14cde6562e226a6a565aca48))
* remove some unused fields from the report object ([#1342](https://github.com/unraid/api/issues/1342)) ([cd323ac](https://github.com/unraid/api/commit/cd323acd4905a558786b029ff5a30371c4512956))
* sso unreliable if API outputs more than raw json ([#1353](https://github.com/unraid/api/issues/1353)) ([e65775f](https://github.com/unraid/api/commit/e65775f8782714d1cc29c8f2801244b5a4043409))
* vms now can detect starting of libvirt and start local hypervisor ([#1356](https://github.com/unraid/api/issues/1356)) ([ad0f4c8](https://github.com/unraid/api/commit/ad0f4c8b55c7f7e94fbae2108f17715b1373a3ef))

## [4.6.6](https://github.com/unraid/api/compare/v4.6.5...v4.6.6) (2025-04-03)

### Bug Fixes

* issue with invalid builds for prod and tagging ([7e89cd2](https://github.com/unraid/api/commit/7e89cd2a3e06a4abc8164f2f4985ad9f6cc9388d))

## [4.6.5](https://github.com/unraid/api/compare/v4.6.4...v4.6.5) (2025-04-03)

### Bug Fixes

* unique artifact ID ([0f682b5](https://github.com/unraid/api/commit/0f682b5f23f4319a1ad8f0e8f2b5e5ae0a2293db))

## [4.6.4](https://github.com/unraid/api/compare/v4.6.3...v4.6.4) (2025-04-03)

### Bug Fixes

* cleanup build pipeline ([#1326](https://github.com/unraid/api/issues/1326)) ([60f16bd](https://github.com/unraid/api/commit/60f16bde416993771fce2ad5861a671504af4b7d))
* remove unneeded workflow secret pass ([4bb00dd](https://github.com/unraid/api/commit/4bb00dd981384083cec40d804209ec2ca18d7aae))

## [4.6.3](https://github.com/unraid/api/compare/v4.6.2...v4.6.3) (2025-04-03)

### Bug Fixes

* copy dynamix.unraid.net ([662d5f6](https://github.com/unraid/api/commit/662d5f64c94586e35bfdaae2df0716c3754b2c45))
* make backup of txz ([37e72f9](https://github.com/unraid/api/commit/37e72f9729f6ab385ed1070fbdca6028688fbd92))
* ordering in build script ([a562f77](https://github.com/unraid/api/commit/a562f7716380bde4a1ae0d6960eff51c37b9291c))

## [4.6.2](https://github.com/unraid/api/compare/v4.6.1...v4.6.2) (2025-04-03)

### Bug Fixes

* build issue ([99d8b31](https://github.com/unraid/api/commit/99d8b31fa8bef13ae6c7dcf74593bc2999a676ed))

## [4.6.1](https://github.com/unraid/api/compare/v4.6.0...v4.6.1) (2025-04-03)

### Bug Fixes

* don't mv deploy on prod release ([9568aab](https://github.com/unraid/api/commit/9568aabd17fbab9e7e2f06f723ee57dc2026583c))

## [4.6.0](https://github.com/unraid/api/compare/v4.5.0...v4.6.0) (2025-04-03)
@@ -1,17 +1,13 @@
 import type { CodegenConfig } from '@graphql-codegen/cli';

 const config: CodegenConfig = {
     overwrite: true,
     emitLegacyCommonJSImports: false,
     verbose: true,
     config: {
         namingConvention: {
             typeNames: './fix-array-type.cjs',
-            enumValues: 'change-case#upperCase',
+            enumValues: 'change-case-all#upperCase',
             transformUnderscore: true,
             useTypeImports: true,
         },
         scalars: {
@@ -31,6 +27,7 @@ const config: CodegenConfig = {
         },
     },
     generates: {
+        // Generate Types for Mothership GraphQL Client
         'src/graphql/generated/client/': {
             documents: './src/graphql/mothership/*.ts',
             schema: {
@@ -50,40 +47,6 @@ const config: CodegenConfig = {
             },
             plugins: [{ add: { content: '/* eslint-disable */' } }],
         },
-        // Generate Types for the API Server
-        'src/graphql/generated/api/types.ts': {
-            schema: ['./src/graphql/types.ts', './src/graphql/schema/types/**/*.graphql'],
-            plugins: [
-                'typescript',
-                'typescript-resolvers',
-                { add: { content: '/* eslint-disable */\n/* @ts-nocheck */' } },
-            ],
-            config: {
-                contextType: '@app/graphql/schema/utils.js#Context',
-                useIndexSignature: true,
-            },
-        },
-        // Generate Operations for any built-in API Server Operations (e.g., report.ts)
-        'src/graphql/generated/api/operations.ts': {
-            documents: './src/graphql/client/api/*.ts',
-            schema: ['./src/graphql/types.ts', './src/graphql/schema/types/**/*.graphql'],
-            preset: 'import-types',
-            presetConfig: {
-                typesPath: '@app/graphql/generated/api/types.js',
-            },
-            plugins: [
-                'typescript-validation-schema',
-                'typescript-operations',
-                'typed-document-node',
-                { add: { content: '/* eslint-disable */' } },
-            ],
-            config: {
-                importFrom: '@app/graphql/generated/api/types.js',
-                strictScalars: true,
-                schema: 'zod',
-                withObjectType: true,
-            },
-        },
         'src/graphql/generated/client/validators.ts': {
             schema: {
                 [process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
@@ -102,4 +65,4 @@ const config: CodegenConfig = {
     },
 };

-export default config;
+export default config;
@@ -1,5 +1,5 @@
 [api]
-version="4.1.3"
+version="4.6.6"
 extraOrigins="https://google.com,https://test.com"
 [local]
 sandbox="yes"
20 api/dev/Unraid.net/myservers.example.cfg (new file)

@@ -0,0 +1,20 @@
[api]
version="4.4.1"
extraOrigins="https://google.com,https://test.com"
[local]
sandbox="yes"
[remote]
wanaccess="yes"
wanport="8443"
upnpEnabled="no"
apikey="_______________________BIG_API_KEY_HERE_________________________"
localApiKey="_______________________LOCAL_API_KEY_HERE_________________________"
email="test@example.com"
username="zspearmint"
avatar="https://via.placeholder.com/200"
regWizTime="1611175408732_0951-1653-3509-FBA155FA23C0"
accesstoken=""
idtoken=""
refreshtoken=""
dynamicRemoteAccessType="DISABLED"
ssoSubIds=""
23 api/dev/configs/connect.json (new file)

@@ -0,0 +1,23 @@
{
    "demo": "2025-04-21T14:27:27.631Z",
    "wanaccess": "yes",
    "wanport": "8443",
    "upnpEnabled": "no",
    "apikey": "_______________________BIG_API_KEY_HERE_________________________",
    "localApiKey": "_______________________LOCAL_API_KEY_HERE_________________________",
    "email": "test@example.com",
    "username": "zspearmint",
    "avatar": "https://via.placeholder.com/200",
    "regWizTime": "1611175408732_0951-1653-3509-FBA155FA23C0",
    "accesstoken": "",
    "idtoken": "",
    "refreshtoken": "",
    "dynamicRemoteAccessType": "DISABLED",
    "ssoSubIds": "",
    "version": "4.6.6",
    "extraOrigins": [
        "https://google.com",
        "https://test.com"
    ],
    "sandbox": "yes"
}
@@ -6,6 +6,6 @@
     "name": "Connect",
     "permissions": [],
     "roles": [
-        "connect"
+        "CONNECT"
     ]
 }
@@ -1,5 +1,5 @@
 [api]
-version="4.4.1"
+version="4.6.6"
 extraOrigins="https://google.com,https://test.com"
 [local]
 sandbox="yes"
31 api/docs/developer/api-plugins.md (new file)

@@ -0,0 +1,31 @@
# Working with API plugins

Under the hood, API plugins (i.e. plugins to the `@unraid/api` project) are represented
as npm `peerDependencies`. This is npm's intended package plugin mechanism, and since
peer dependencies are installed by default as of npm v7, it supports bi-directional plugin
functionality: the API provides dependencies for the plugin, while the plugin provides
functionality to the API.

## Private Workspace plugins

### Adding a local workspace package as an API plugin

The challenge with local workspace plugins is that they aren't available via npm during production.
To solve this, we vendor them inside `dist/plugins`. To prevent the build from breaking, however,
you should mark the workspace dependency as optional. For example:

```json
{
    "peerDependencies": {
        "unraid-api-plugin-connect": "workspace:*"
    },
    "peerDependenciesMeta": {
        "unraid-api-plugin-connect": {
            "optional": true
        }
    }
}
```

By marking the workspace dependency "optional", npm will not attempt to install it.
Thus, even though the "workspace:*" identifier will be invalid during build-time and run-time,
it will not cause problems.
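
Vendored plugins can then be picked up at runtime with a dynamic import. The following is a minimal sketch, not the project's actual loader: it assumes each plugin's entry point is vendored at `dist/plugins/<name>/index.js`, and the `loadVendoredPlugin` helper is hypothetical.

```typescript
import { existsSync } from 'node:fs';
import { dirname, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';

const __dirname = dirname(fileURLToPath(import.meta.url));

// Hypothetical loader: resolve a vendored plugin's entry point and import it.
async function loadVendoredPlugin(name: string): Promise<unknown | null> {
    const entry = resolve(__dirname, 'plugins', name, 'index.js');
    if (!existsSync(entry)) {
        // The optional peer dependency was not vendored; skip it gracefully.
        return null;
    }
    return import(entry);
}

// Usage: the optional Connect plugin may or may not be present.
const plugin = await loadVendoredPlugin('unraid-api-plugin-connect');
```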

@@ -11,7 +11,7 @@
     "max_restarts": 10,
     "min_uptime": 10000,
     "watch": false,
-    "interpreter": "/usr/local/node/bin/node",
+    "interpreter": "/usr/local/bin/node",
     "ignore_watch": ["node_modules", "src", ".env.*", "myservers.cfg"],
     "log_file": "/var/log/graphql-api.log",
     "kill_timeout": 10000
@@ -1,18 +0,0 @@
/**
 * This function wraps constant case, which turns any string into CONSTANT_CASE.
 * However, that function has a bug: if you pass _ to it, it will return an empty
 * string. This small module fixes that.
 *
 * @param {string} str
 * @return {string}
 */
function FixArrayType(str) {
    if (str === 'Array') {
        return 'ArrayType';
    }

    // If result is an empty string, just return the original string
    return str;
}

module.exports = FixArrayType;
1563 api/generated-schema-new.graphql (new file; diff suppressed because it is too large)
1612 api/generated-schema.graphql (new file; diff suppressed because it is too large)
@@ -14,3 +14,7 @@ default:

 alias b := build
 alias d := deploy

+sync-env server:
+    rsync -avz --progress --stats -e ssh .env* root@{{server}}:/usr/local/unraid-api
+    ssh root@{{server}} 'cp /usr/local/unraid-api/.env.staging /usr/local/unraid-api/.env'
3 api/legacy/README.md (new file)

@@ -0,0 +1,3 @@
# Legacy Assets

This folder will store legacy types / functionality that may be useful but is not currently a part of the API
1365 api/legacy/generated-schema-legacy.graphql (new file; diff suppressed because it is too large)
@@ -1,6 +1,6 @@
 {
     "name": "@unraid/api",
-    "version": "4.6.0",
+    "version": "4.7.0",
     "main": "src/cli/index.ts",
     "type": "module",
     "corepack": {
@@ -20,7 +20,7 @@
         "command:raw": "./dist/cli.js",
         "// Build and Deploy": "",
         "build": "vite build --mode=production",
-        "postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js",
+        "postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js && node scripts/copy-plugins.js",
         "build:watch": "nodemon --watch src --ext ts,js,json --exec 'tsx ./scripts/build.ts'",
         "build:docker": "./scripts/dc.sh run --rm builder",
         "build:release": "tsx ./scripts/build.ts",
@@ -43,7 +43,9 @@
         "container:start": "pnpm run container:stop && ./scripts/dc.sh run --rm --service-ports dev",
         "container:stop": "./scripts/dc.sh stop dev",
         "container:test": "./scripts/dc.sh run --rm builder pnpm run test",
-        "container:enter": "./scripts/dc.sh exec dev /bin/bash"
+        "container:enter": "./scripts/dc.sh exec dev /bin/bash",
+        "// Migration Scripts": "",
+        "migration:codefirst": "tsx ./src/unraid-api/graph/migration-script.ts"
     },
     "bin": {
         "unraid-api": "dist/cli.js"
@@ -61,7 +63,9 @@
         "@graphql-tools/utils": "^10.5.5",
         "@jsonforms/core": "^3.5.1",
         "@nestjs/apollo": "^13.0.3",
+        "@nestjs/cache-manager": "^3.0.1",
         "@nestjs/common": "^11.0.11",
+        "@nestjs/config": "^4.0.2",
         "@nestjs/core": "^11.0.11",
         "@nestjs/graphql": "^13.0.3",
         "@nestjs/passport": "^11.0.0",
@@ -71,15 +75,18 @@
         "@reduxjs/toolkit": "^2.3.0",
         "@runonflux/nat-upnp": "^1.0.2",
         "@types/diff": "^7.0.1",
-        "@unraid/libvirt": "^1.1.3",
+        "@unraid/libvirt": "^2.1.0",
+        "accesscontrol": "^2.2.1",
         "bycontract": "^2.0.11",
         "bytes": "^3.1.2",
+        "cache-manager": "^6.4.2",
         "cacheable-lookup": "^7.0.0",
         "camelcase-keys": "^9.1.3",
+        "casbin": "^5.32.0",
         "change-case": "^5.4.4",
         "chokidar": "^4.0.1",
         "class-transformer": "^0.5.1",
         "class-validator": "^0.14.1",
         "cli-table": "^0.3.11",
         "command-exists": "^1.2.9",
         "convert": "^5.8.0",
@@ -87,8 +94,7 @@
         "cron": "3.5.0",
         "cross-fetch": "^4.0.0",
         "diff": "^7.0.0",
-        "docker-event-emitter": "^0.3.0",
-        "dockerode": "^3.3.5",
+        "dockerode": "^4.0.5",
         "dotenv": "^16.4.5",
         "execa": "^9.5.1",
         "exit-hook": "^4.0.0",
@@ -136,6 +142,14 @@
         "zen-observable-ts": "^1.1.0",
         "zod": "^3.23.8"
     },
+    "peerDependencies": {
+        "unraid-api-plugin-connect": "workspace:*"
+    },
+    "peerDependenciesMeta": {
+        "unraid-api-plugin-connect": {
+            "optional": true
+        }
+    },
     "devDependencies": {
         "@eslint/js": "^9.21.0",
         "@graphql-codegen/add": "^5.0.3",
@@ -163,6 +177,7 @@
         "@types/ini": "^4.1.1",
         "@types/ip": "^1.1.3",
         "@types/lodash": "^4.17.13",
+        "@types/lodash-es": "^4.17.12",
         "@types/mustache": "^4.2.5",
         "@types/node": "^22.13.4",
         "@types/pify": "^6.0.0",
@@ -193,7 +208,7 @@
         "typescript-eslint": "^8.13.0",
         "unplugin-swc": "^1.5.1",
         "vite": "^6.0.0",
-        "vite-plugin-node": "^4.0.0",
+        "vite-plugin-node": "^5.0.0",
         "vite-tsconfig-paths": "^5.1.0",
         "vitest": "^3.0.5",
         "zx": "^8.3.2"
@@ -201,8 +216,15 @@
     "overrides": {
         "eslint": {
             "jiti": "2"
         },
+        "@as-integrations/fastify": {
+            "fastify": "$fastify"
+        },
+        "nest-authz": {
+            "@nestjs/common": "$@nestjs/common",
+            "@nestjs/core": "$@nestjs/core"
+        }
     },
     "private": true,
-    "packageManager": "pnpm@10.7.1"
+    "packageManager": "pnpm@10.8.1"
 }
@@ -1,11 +1,17 @@
 #!/usr/bin/env zx
-import { mkdir, readFile, rm, writeFile } from 'fs/promises';
+import { mkdir, readFile, writeFile } from 'fs/promises';
 import { exit } from 'process';

+import type { PackageJson } from 'type-fest';
 import { $, cd } from 'zx';

 import { getDeploymentVersion } from './get-deployment-version.js';

+type ApiPackageJson = PackageJson & {
+    version: string;
+    peerDependencies: Record<string, string>;
+};
+
 try {
     // Create release and pack directories
     await mkdir('./deploy/release', { recursive: true });
@@ -19,13 +25,12 @@ try {

     // Get package details
     const packageJson = await readFile('./package.json', 'utf-8');
-    const parsedPackageJson = JSON.parse(packageJson);
-
+    const parsedPackageJson = JSON.parse(packageJson) as ApiPackageJson;
     const deploymentVersion = await getDeploymentVersion(process.env, parsedPackageJson.version);

     // Update the package.json version to the deployment version
     parsedPackageJson.version = deploymentVersion;
-    // omit dev dependencies from release build
+    // omit dev dependencies from vendored dependencies in release build
     parsedPackageJson.devDependencies = {};

     // Create a temporary directory for packaging
@@ -38,18 +43,19 @@ try {
     // Change to the pack directory and install dependencies
     cd('./deploy/pack');

-    console.log('Building production pnpm store...');
+    console.log('Building production node_modules...');
     $.verbose = true;
-    await $`pnpm install --prod --ignore-workspace --store-dir=../.pnpm-store`;
+    await $`npm install --omit=dev`;

     await writeFile('package.json', JSON.stringify(parsedPackageJson, null, 4));

-    await $`rm -rf node_modules`; // Don't include node_modules in final package
-
     const sudoCheck = await $`command -v sudo`.nothrow();
     const SUDO = sudoCheck.exitCode === 0 ? 'sudo' : '';
-    await $`${SUDO} chown -R 0:0 ../.pnpm-store`;
+    await $`${SUDO} chown -R 0:0 node_modules`;

-    await $`XZ_OPT=-5 tar -cJf ../packed-pnpm-store.txz ../.pnpm-store`;
-    await $`${SUDO} rm -rf ../.pnpm-store`;
+    await $`XZ_OPT=-5 tar -cJf packed-node-modules.tar.xz node_modules`;
+    await $`mv packed-node-modules.tar.xz ../`;
+    await $`${SUDO} rm -rf node_modules`;

     // chmod the cli
     await $`chmod +x ./dist/cli.js`;
59 api/scripts/copy-plugins.js (new file)

@@ -0,0 +1,59 @@
#!/usr/bin/env node

/**
 * This AI-generated script copies workspace plugin dist folders to the dist/plugins directory
 * to ensure they're available for dynamic imports in production.
 */
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Get the package.json to find workspace dependencies
const packageJsonPath = path.resolve(__dirname, '../package.json');
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));

// Create the plugins directory if it doesn't exist
const pluginsDir = path.resolve(__dirname, '../dist/plugins');
if (!fs.existsSync(pluginsDir)) {
    fs.mkdirSync(pluginsDir, { recursive: true });
}

// Find all workspace plugins
const pluginPrefix = 'unraid-api-plugin-';
const workspacePlugins = Object.keys(packageJson.peerDependencies || {}).filter((pkgName) =>
    pkgName.startsWith(pluginPrefix)
);

// Copy each plugin's dist folder to the plugins directory
for (const pkgName of workspacePlugins) {
    const pluginPath = path.resolve(__dirname, `../../packages/${pkgName}`);
    const pluginDistPath = path.resolve(pluginPath, 'dist');
    const targetPath = path.resolve(pluginsDir, pkgName);

    console.log(`Building ${pkgName}...`);
    try {
        execSync('pnpm build', {
            cwd: pluginPath,
            stdio: 'inherit',
        });
        console.log(`Successfully built ${pkgName}`);
    } catch (error) {
        console.error(`Failed to build ${pkgName}:`, error.message);
        process.exit(1);
    }

    if (!fs.existsSync(pluginDistPath)) {
        console.warn(`Plugin ${pkgName} dist folder not found at ${pluginDistPath}`);
        process.exit(1);
    }
    console.log(`Copying ${pkgName} dist folder to ${targetPath}`);
    fs.mkdirSync(targetPath, { recursive: true });
    fs.cpSync(pluginDistPath, targetPath, { recursive: true });
    console.log(`Successfully copied ${pkgName} dist folder`);
}

console.log('Plugin dist folders copied successfully');
@@ -29,7 +29,7 @@ fi
 destination_directory="/usr/local/unraid-api"

 # Replace the value inside the rsync command with the user's input
-rsync_command="rsync -avz --progress --stats -e ssh \"$source_directory\" \"root@${server_name}:$destination_directory\""
+rsync_command="rsync -avz --delete --progress --stats -e ssh \"$source_directory\" \"root@${server_name}:$destination_directory\""

 echo "Executing the following command:"
 echo "$rsync_command"
@@ -9,36 +9,37 @@ import { expect, test } from 'vitest';

test('Returns allowed origins', async () => {
// Load state files into store
await store.dispatch(loadStateFiles());
await store.dispatch(loadConfigFile());
await store.dispatch(loadStateFiles()).unwrap();
await store.dispatch(loadConfigFile()).unwrap();

// Get allowed origins
expect(getAllowedOrigins()).toMatchInlineSnapshot(`
[
"/var/run/unraid-notifications.sock",
"/var/run/unraid-php.sock",
"/var/run/unraid-cli.sock",
"http://localhost:8080",
"https://localhost:4443",
"https://tower.local:4443",
"https://192.168.1.150:4443",
"https://tower:4443",
"https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443",
"https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443",
"https://10-252-0-1.hash.myunraid.net:4443",
"https://10-252-1-1.hash.myunraid.net:4443",
"https://10-253-3-1.hash.myunraid.net:4443",
"https://10-253-4-1.hash.myunraid.net:4443",
"https://10-253-5-1.hash.myunraid.net:4443",
"https://10-100-0-1.hash.myunraid.net:4443",
"https://10-100-0-2.hash.myunraid.net:4443",
"https://10-123-1-2.hash.myunraid.net:4443",
"https://221-123-121-112.hash.myunraid.net:4443",
"https://google.com",
"https://test.com",
"https://connect.myunraid.net",
"https://connect-staging.myunraid.net",
"https://dev-my.myunraid.net:4000",
]
`);
const allowedOrigins = getAllowedOrigins();

// Test that the result is an array
expect(Array.isArray(allowedOrigins)).toBe(true);

// Test that it contains the expected socket paths
expect(allowedOrigins).toContain('/var/run/unraid-notifications.sock');
expect(allowedOrigins).toContain('/var/run/unraid-php.sock');
expect(allowedOrigins).toContain('/var/run/unraid-cli.sock');

// Test that it contains the expected local URLs
expect(allowedOrigins).toContain('http://localhost:8080');
expect(allowedOrigins).toContain('https://localhost:4443');

// Test that it contains the expected connect URLs
expect(allowedOrigins).toContain('https://connect.myunraid.net');
expect(allowedOrigins).toContain('https://connect-staging.myunraid.net');
expect(allowedOrigins).toContain('https://dev-my.myunraid.net:4000');

// Test that it contains the extra origins from config
expect(allowedOrigins).toContain('https://google.com');
expect(allowedOrigins).toContain('https://test.com');

// Test that it contains some of the remote URLs
expect(allowedOrigins).toContain('https://tower.local:4443');
expect(allowedOrigins).toContain('https://192.168.1.150:4443');

// Test that there are no duplicates
expect(allowedOrigins.length).toBe(new Set(allowedOrigins).size);
});
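A note on the `.unwrap()` change above: dispatching a Redux Toolkit async thunk resolves to an action object even when the thunk rejects, so without `.unwrap()` a failed state-file load would let the test continue against empty state. A minimal sketch of the difference, with hypothetical names:

```typescript
import { configureStore, createAsyncThunk, createSlice } from '@reduxjs/toolkit';
import { expect, test } from 'vitest';

// Hypothetical thunk that always fails, standing in for loadStateFiles().
const loadSomething = createAsyncThunk('demo/load', async () => {
    throw new Error('state file missing');
});

const demo = createSlice({ name: 'demo', initialState: {}, reducers: {} });
const store = configureStore({ reducer: { demo: demo.reducer } });

test('unwrap surfaces setup failures', async () => {
    // Resolves to a rejected action object; the error is swallowed.
    await store.dispatch(loadSomething());
    // Rethrows the original error, so the test fails loudly.
    await expect(store.dispatch(loadSomething()).unwrap()).rejects.toThrow('state file missing');
});
```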
@@ -1,5 +0,0 @@
import { test } from 'vitest';

test.todo('Adds a disk to the array');

test.todo('Fails to add the disk if the array is started');
@@ -1,209 +0,0 @@
import { expect, test, vi } from 'vitest';

import { getArrayData } from '@app/core/modules/array/get-array-data.js';
import { store } from '@app/store/index.js';
import { loadConfigFile } from '@app/store/modules/config.js';
import { loadStateFiles } from '@app/store/modules/emhttp.js';

vi.mock('@app/core/pubsub.js', () => ({
pubsub: { publish: vi.fn() },
}));

test('Creates an array event', async () => {
// Load state files into store
await store.dispatch(loadStateFiles());

await store.dispatch(loadConfigFile());

const arrayEvent = getArrayData(store.getState);
expect(arrayEvent).toMatchObject({
boot: {
comment: 'Unraid OS boot device',
critical: null,
device: 'sda',
exportable: true,
format: 'unknown',
fsFree: 3191407,
fsSize: 4042732,
fsType: 'vfat',
fsUsed: 851325,
id: 'Cruzer',
idx: 32,
name: 'flash',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 3956700,
status: 'DISK_OK',
temp: null,
transport: 'usb',
type: 'Flash',
warning: null,
},
caches: [
{
comment: '',
critical: null,
device: 'sdi',
exportable: false,
format: 'MBR: 4KiB-aligned',
fsFree: 111810683,
fsSize: 250059317,
fsType: 'btrfs',
fsUsed: 137273827,
id: 'Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z',
idx: 30,
name: 'cache',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: false,
size: 244198552,
status: 'DISK_OK',
temp: 22,
transport: 'ata',
type: 'Cache',
warning: null,
},
{
comment: null,
critical: null,
device: 'nvme0n1',
exportable: false,
format: 'MBR: 4KiB-aligned',
fsFree: null,
fsSize: null,
fsType: null,
fsUsed: null,
id: 'KINGSTON_SA2000M8250G_50026B7282669D9E',
idx: 31,
name: 'cache2',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: false,
size: 244198552,
status: 'DISK_OK',
temp: 27,
transport: 'nvme',
type: 'Cache',
warning: null,
},
],
capacity: {
disks: {
free: '27',
total: '30',
used: '3',
},
kilobytes: {
free: '19495825571',
total: '41994745901',
used: '22498920330',
},
},
disks: [
{
comment: 'Seagate Exos',
critical: 75,
device: 'sdf',
exportable: false,
format: 'GPT: 4KiB-aligned',
fsFree: 13882739732,
fsSize: 17998742753,
fsType: 'xfs',
fsUsed: 4116003021,
id: 'ST18000NM000J-2TV103_ZR5B1W9X',
idx: 1,
name: 'disk1',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 17578328012,
status: 'DISK_OK',
temp: 30,
transport: 'ata',
type: 'Data',
warning: 50,
},
{
comment: '',
critical: null,
device: 'sdj',
exportable: false,
format: 'GPT: 4KiB-aligned',
fsFree: 93140746,
fsSize: 11998001574,
fsType: 'xfs',
fsUsed: 11904860828,
id: 'WDC_WD120EDAZ-11F3RA0_5PJRD45C',
idx: 2,
name: 'disk2',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 11718885324,
status: 'DISK_OK',
temp: 30,
transport: 'ata',
type: 'Data',
warning: null,
},
{
comment: '',
critical: null,
device: 'sde',
exportable: false,
format: 'GPT: 4KiB-aligned',
fsFree: 5519945093,
fsSize: 11998001574,
fsType: 'xfs',
fsUsed: 6478056481,
id: 'WDC_WD120EMAZ-11BLFA0_5PH8BTYD',
idx: 3,
name: 'disk3',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 11718885324,
status: 'DISK_OK',
temp: 30,
transport: 'ata',
type: 'Data',
warning: null,
},
],
id: expect.any(String),
parities: [
{
comment: null,
critical: null,
device: 'sdh',
exportable: false,
format: 'GPT: 4KiB-aligned',
fsFree: null,
fsSize: null,
fsType: null,
fsUsed: null,
id: 'ST18000NM000J-2TV103_ZR585CPY',
idx: 0,
name: 'parity',
numErrors: 0,
numReads: 0,
numWrites: 0,
rotational: true,
size: 17578328012,
status: 'DISK_OK',
temp: 25,
transport: 'ata',
type: 'Parity',
warning: null,
},
],
state: 'STOPPED',
});
});
@@ -1,5 +0,0 @@
import { test } from 'vitest';

test.todo('Removes a disk from the array');

test.todo('Fails to remove the disk if the array is started');
@@ -1,5 +0,0 @@
import { test } from 'vitest';

test.todo('Starts the array');

test.todo('Stops the array');
@@ -1,7 +0,0 @@
import { test } from 'vitest';

test.todo('Can start a parity check');

test.todo('Can pause a parity check');

test.todo('Can start a parity check');
@@ -80,7 +80,6 @@ test('it creates a FLASH config with OPTIONAL values', () => {
// 2fa & t2fa should be ignored
basicConfig.remote['2Fa'] = 'yes';
basicConfig.local['2Fa'] = 'yes';
basicConfig.local.showT2Fa = 'yes';

basicConfig.api.extraOrigins = 'myextra.origins';
basicConfig.remote.upnpEnabled = 'yes';
@@ -120,7 +119,6 @@ test('it creates a MEMORY config with OPTIONAL values', () => {
// 2fa & t2fa should be ignored
basicConfig.remote['2Fa'] = 'yes';
basicConfig.local['2Fa'] = 'yes';
basicConfig.local.showT2Fa = 'yes';
basicConfig.api.extraOrigins = 'myextra.origins';
basicConfig.remote.upnpEnabled = 'yes';
basicConfig.connectionStatus.upnpStatus = 'Turned On';

@@ -44,7 +44,7 @@ test('Returns empty key if key location is empty', async () => {

// Check if store has state files loaded
const { status } = store.getState().registration;
expect(status).toBe(FileLoadStatus.LOADED);
expect(status).toBe(FileLoadStatus.UNLOADED);
await expect(getKeyFile()).resolves.toBe('');
});

@@ -53,10 +53,10 @@ test(
async () => {
const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
const { loadStateFiles } = await import('@app/store/modules/emhttp.js');

const { loadRegistrationKey } = await import('@app/store/modules/registration.js');
// Load state files into store
await store.dispatch(loadStateFiles());

await store.dispatch(loadRegistrationKey());
// Check if store has state files loaded
const { status } = store.getState().registration;
expect(status).toBe(FileLoadStatus.LOADED);
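The dynamic `await import(...)` calls above are a Vitest idiom: `vi.mock` factories are hoisted, but importing the module under test inside the test body guarantees it is evaluated after the mocks (and any `vi.resetModules()` calls) are in place. A small sketch of the pattern, assuming an auto-mocked module:

```typescript
import { expect, test, vi } from 'vitest';

// Hoisted above all imports by Vitest; the module is auto-mocked.
vi.mock('@app/core/utils/misc/get-key-file.js');

test('uses the mocked module', async () => {
    // Imported here, after the hoisted mock has been registered.
    const { getKeyFile } = await import('@app/core/utils/misc/get-key-file.js');
    vi.mocked(getKeyFile).mockResolvedValue('');
    await expect(getKeyFile()).resolves.toBe('');
});
```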
@@ -8,229 +8,240 @@ test('Returns both disk and user shares', async () => {
await store.dispatch(loadStateFiles());

expect(getShares()).toMatchInlineSnapshot(`
{
"disks": [],
"users": [
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "appdata",
"nameOrig": "appdata",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "saved VM instances",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "domains",
"nameOrig": "domains",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "ISO images",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "isos",
"nameOrig": "isos",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "system data",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "system",
"nameOrig": "system",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
},
],
}
`);
{
"disks": [],
"users": [
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "appdata",
"include": [],
"luksStatus": "0",
"name": "appdata",
"nameOrig": "appdata",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "saved VM instances",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "domains",
"include": [],
"luksStatus": "0",
"name": "domains",
"nameOrig": "domains",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "ISO images",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "isos",
"include": [],
"luksStatus": "0",
"name": "isos",
"nameOrig": "isos",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "system data",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "system",
"include": [],
"luksStatus": "0",
"name": "system",
"nameOrig": "system",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
},
],
}
`);
});

test('Returns shares by type', async () => {
await store.dispatch(loadStateFiles());
expect(getShares('user')).toMatchInlineSnapshot(`
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "appdata",
"nameOrig": "appdata",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
}
`);
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "appdata",
"include": [],
"luksStatus": "0",
"name": "appdata",
"nameOrig": "appdata",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
}
`);
expect(getShares('users')).toMatchInlineSnapshot(`
[
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "appdata",
"nameOrig": "appdata",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "saved VM instances",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "domains",
"nameOrig": "domains",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "ISO images",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "isos",
"nameOrig": "isos",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "system data",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "system",
"nameOrig": "system",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
},
]
`);
[
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "appdata",
"include": [],
"luksStatus": "0",
"name": "appdata",
"nameOrig": "appdata",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "saved VM instances",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "domains",
"include": [],
"luksStatus": "0",
"name": "domains",
"nameOrig": "domains",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "ISO images",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "isos",
"include": [],
"luksStatus": "0",
"name": "isos",
"nameOrig": "isos",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "",
"type": "user",
"used": 33619300,
},
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "system data",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "system",
"include": [],
"luksStatus": "0",
"name": "system",
"nameOrig": "system",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
},
]
`);
expect(getShares('disk')).toMatchInlineSnapshot('null');
expect(getShares('disks')).toMatchInlineSnapshot('[]');
});

test('Returns shares by name', async () => {
await store.dispatch(loadStateFiles());
expect(getShares('user', { name: 'domains' })).toMatchInlineSnapshot(`
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "saved VM instances",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "domains",
"nameOrig": "domains",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
}
`);
{
"allocator": "highwater",
"cachePool": "cache",
"color": "yellow-on",
"comment": "saved VM instances",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "domains",
"include": [],
"luksStatus": "0",
"name": "domains",
"nameOrig": "domains",
"nfs": {},
"size": 0,
"smb": {},
"splitLevel": "1",
"type": "user",
"used": 33619300,
}
`);
expect(getShares('user', { name: 'non-existent-user-share' })).toMatchInlineSnapshot('null');
// @TODO: disk shares need to be added to the dev ini files
expect(getShares('disk', { name: 'disk1' })).toMatchInlineSnapshot('null');
@@ -1,4 +1,4 @@
import { expect, test } from 'vitest';
import { expect, test, vi } from 'vitest';

import type { NginxUrlFields } from '@app/graphql/resolvers/subscription/network.js';
import { type Nginx } from '@app/core/types/states/nginx.js';
@@ -10,6 +10,7 @@ import {
import { store } from '@app/store/index.js';
import { loadConfigFile } from '@app/store/modules/config.js';
import { loadStateFiles } from '@app/store/modules/emhttp.js';
import { URL_TYPE } from '@app/unraid-api/graph/resolvers/connect/connect.model.js';

test.each([
[{ httpPort: 80, httpsPort: 443, url: 'my-default-url.com' }],
@@ -190,90 +191,37 @@ test('integration test, loading nginx ini and generating all URLs', async () =>
await store.dispatch(loadStateFiles());
await store.dispatch(loadConfigFile());

// Instead of mocking the getServerIps function, we'll use the actual function
// and verify the structure of the returned URLs
const urls = getServerIps();
expect(urls.urls).toMatchInlineSnapshot(`
[
{
"ipv4": "https://tower.local:4443/",
"ipv6": "https://tower.local:4443/",
"name": "Default",
"type": "DEFAULT",
},
{
"ipv4": "https://192.168.1.150:4443/",
"name": "LAN IPv4",
"type": "LAN",
},
{
"ipv4": "https://tower:4443/",
"name": "LAN Name",
"type": "MDNS",
},
{
"ipv4": "https://tower.local:4443/",
"name": "LAN MDNS",
"type": "MDNS",
},
{
"ipv4": "https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443/",
"name": "FQDN LAN",
"type": "LAN",
},
{
"ipv4": "https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443/",
"name": "FQDN WAN",
"type": "WAN",
},
{
"ipv4": "https://10-252-0-1.hash.myunraid.net:4443/",
"name": "FQDN WG 0",
"type": "WIREGUARD",
},
{
"ipv4": "https://10-252-1-1.hash.myunraid.net:4443/",
"name": "FQDN WG 1",
"type": "WIREGUARD",
},
{
"ipv4": "https://10-253-3-1.hash.myunraid.net:4443/",
"name": "FQDN WG 2",
"type": "WIREGUARD",
},
{
"ipv4": "https://10-253-4-1.hash.myunraid.net:4443/",
"name": "FQDN WG 3",
"type": "WIREGUARD",
},
{
"ipv4": "https://10-253-5-1.hash.myunraid.net:4443/",
"name": "FQDN WG 4",
"type": "WIREGUARD",
},
{
"ipv4": "https://10-100-0-1.hash.myunraid.net:4443/",
"name": "FQDN TAILSCALE 0",
"type": "WIREGUARD",
},
{
"ipv4": "https://10-100-0-2.hash.myunraid.net:4443/",
"name": "FQDN TAILSCALE 1",
"type": "WIREGUARD",
},
{
"ipv4": "https://10-123-1-2.hash.myunraid.net:4443/",
"name": "FQDN CUSTOM 0",
"type": "WIREGUARD",
},
{
"ipv4": "https://221-123-121-112.hash.myunraid.net:4443/",
"name": "FQDN CUSTOM 1",
"type": "WIREGUARD",
},
]
`);
expect(urls.errors).toMatchInlineSnapshot(`
[
[Error: IP URL Resolver: Could not resolve any access URL for field: "lanIp6", is FQDN?: false],
]
`);

// Verify that we have URLs
expect(urls.urls.length).toBeGreaterThan(0);
expect(urls.errors.length).toBeGreaterThanOrEqual(0);

// Verify that each URL has the expected structure
urls.urls.forEach((url) => {
expect(url).toHaveProperty('ipv4');
expect(url).toHaveProperty('name');
expect(url).toHaveProperty('type');

// Verify that the URL matches the expected pattern based on its type
if (url.type === URL_TYPE.DEFAULT) {
expect(url.ipv4?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
expect(url.ipv6?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
} else if (url.type === URL_TYPE.LAN) {
expect(url.ipv4?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
} else if (url.type === URL_TYPE.MDNS) {
expect(url.ipv4?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
} else if (url.type === URL_TYPE.WIREGUARD) {
expect(url.ipv4?.toString()).toMatch(/^https:\/\/.*:\d+\/$/);
}
});

// Verify that the error message contains the expected text
if (urls.errors.length > 0) {
expect(urls.errors[0].message).toContain(
'IP URL Resolver: Could not resolve any access URL for field:'
);
}
});
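Pairing the inline snapshot with the structural `forEach` checks above is a deliberate belt-and-braces pattern: the snapshot pins the exact URL set produced by this fixture, while the property and regex assertions encode invariants that keep holding even after the snapshot is regenerated (for example with vitest's `-u`).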
api/src/__test__/setup.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
import '@app/__test__/setup/env-setup.js';
// import './setup/mock-fs-setup';
import '@app/__test__/setup/keyserver-mock.js';
import '@app/__test__/setup/config-setup.js';
import '@app/__test__/setup/store-reset.js';

// This file is automatically loaded by Vitest before running tests
// It imports all the setup files that need to be run before tests
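For reference, a setup entrypoint like this is normally registered via `test.setupFiles`. A minimal sketch of the assumed Vitest config (the actual vitest.config.ts is not part of this diff):

```typescript
import { defineConfig } from 'vitest/config';

export default defineConfig({
    test: {
        // Runs once per test file, before any tests, pulling in the setup chain above.
        setupFiles: ['./src/__test__/setup.ts'],
    },
});
```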
api/src/__test__/setup/config-setup.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { copyFileSync, existsSync } from 'fs';
import { join, resolve } from 'path';

// Get the project root directory
const projectRoot = resolve(process.cwd());

// Define paths
const sourceFile = join(projectRoot, 'dev/Unraid.net/myservers.example.cfg');
const destFile = join(projectRoot, 'dev/Unraid.net/myservers.cfg');

// Ensure the example file exists
if (!existsSync(sourceFile)) {
    console.error('Error: myservers.example.cfg not found!');
    process.exit(1);
}

copyFileSync(sourceFile, destFile);
api/src/__test__/setup/mock-fs-setup.ts (new file, 45 lines)
@@ -0,0 +1,45 @@
import { beforeEach, vi } from 'vitest';

// Create a global mock file system that can be used across all tests
export const mockFileSystem = new Map<string, string>();

// Mock fs/promises
vi.mock('node:fs/promises', () => ({
    writeFile: vi.fn().mockImplementation((path, content) => {
        mockFileSystem.set(path.toString(), content.toString());
        return Promise.resolve();
    }),
    readFile: vi.fn().mockImplementation((path) => {
        const content = mockFileSystem.get(path.toString());
        if (content === undefined) {
            return Promise.reject(new Error(`File not found: ${path}`));
        }
        return Promise.resolve(content);
    }),
    access: vi.fn().mockImplementation((path) => {
        if (mockFileSystem.has(path.toString())) {
            return Promise.resolve();
        }
        return Promise.reject(new Error(`File not found: ${path}`));
    }),
}));

// Mock fs-extra
vi.mock('fs-extra', () => ({
    emptyDir: vi.fn().mockImplementation(() => {
        mockFileSystem.clear();
        return Promise.resolve();
    }),
}));

// Mock file-exists utility
vi.mock('@app/core/utils/files/file-exists.js', () => ({
    fileExists: vi.fn().mockImplementation((path) => {
        return Promise.resolve(mockFileSystem.has(path.toString()));
    }),
}));

// Clear the mock file system before each test
beforeEach(() => {
    mockFileSystem.clear();
});
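Note that this module is currently commented out in setup.ts above. When enabled, tests read and write through the in-memory map instead of the real filesystem. A sketch of what that looks like from a test's point of view (paths hypothetical):

```typescript
import { readFile, writeFile } from 'node:fs/promises';
import { expect, test } from 'vitest';

import { mockFileSystem } from '@app/__test__/setup/mock-fs-setup.js';

test('round-trips through the in-memory map', async () => {
    await writeFile('/tmp/example.cfg', 'key=value');
    // The mocked readFile resolves the stored string directly.
    await expect(readFile('/tmp/example.cfg')).resolves.toBe('key=value');
    expect(mockFileSystem.get('/tmp/example.cfg')).toBe('key=value');
});
```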
api/src/__test__/setup/store-reset.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
import { beforeEach } from 'vitest';

import { resetStore } from '@app/store/actions/reset-store.js';
import { store } from '@app/store/index.js';

beforeEach(() => {
    store.dispatch(resetStore());
});
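Since the Redux store is a module-level singleton shared by every test in a worker, dispatching `resetStore()` in a global `beforeEach` is what keeps the dispatch-heavy suites below order-independent; without it, state loaded by one test would leak into the next.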
@@ -1,7 +1,99 @@
import { expect, test } from 'vitest';
import { beforeEach, expect, test, vi } from 'vitest';

import { pubsub, PUBSUB_CHANNEL } from '@app/core/pubsub.js';
import { GraphQLClient } from '@app/mothership/graphql-client.js';
import { stopPingTimeoutJobs } from '@app/mothership/jobs/ping-timeout-jobs.js';
import { setGraphqlConnectionStatus } from '@app/store/actions/set-minigraph-status.js';
import { setupRemoteAccessThunk } from '@app/store/actions/setup-remote-access.js';
import { store } from '@app/store/index.js';
import { MyServersConfigMemory } from '@app/types/my-servers-config.js';
import { MinigraphStatus } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';
import {
WAN_ACCESS_TYPE,
WAN_FORWARD_TYPE,
} from '@app/unraid-api/graph/resolvers/connect/connect.model.js';

// Mock dependencies
vi.mock('@app/core/pubsub.js', () => {
const mockPublish = vi.fn();
return {
pubsub: {
publish: mockPublish,
},
PUBSUB_CHANNEL: {
OWNER: 'OWNER',
SERVERS: 'SERVERS',
},
__esModule: true,
default: {
pubsub: {
publish: mockPublish,
},
PUBSUB_CHANNEL: {
OWNER: 'OWNER',
SERVERS: 'SERVERS',
},
},
};
});

// Get the mock function for pubsub.publish
const mockPublish = vi.mocked(pubsub.publish);

// Clear mock before each test
beforeEach(() => {
mockPublish.mockClear();
});

vi.mock('@app/mothership/graphql-client.js', () => ({
GraphQLClient: {
clearInstance: vi.fn(),
},
}));

vi.mock('@app/mothership/jobs/ping-timeout-jobs.js', () => ({
stopPingTimeoutJobs: vi.fn(),
}));

const createConfigMatcher = (specificValues: Partial<MyServersConfigMemory> = {}) => {
const defaultMatcher = {
api: expect.objectContaining({
extraOrigins: expect.any(String),
version: expect.any(String),
}),
connectionStatus: expect.objectContaining({
minigraph: expect.any(String),
upnpStatus: expect.any(String),
}),
local: expect.objectContaining({
sandbox: expect.any(String),
}),
nodeEnv: expect.any(String),
remote: expect.objectContaining({
accesstoken: expect.any(String),
allowedOrigins: expect.any(String),
apikey: expect.any(String),
avatar: expect.any(String),
dynamicRemoteAccessType: expect.any(String),
email: expect.any(String),
idtoken: expect.any(String),
localApiKey: expect.any(String),
refreshtoken: expect.any(String),
regWizTime: expect.any(String),
ssoSubIds: expect.any(String),
upnpEnabled: expect.any(String),
username: expect.any(String),
wanaccess: expect.any(String),
wanport: expect.any(String),
}),
status: expect.any(String),
};

return expect.objectContaining({
...defaultMatcher,
...specificValues,
});
};

test('Before init returns default values for all fields', async () => {
const state = store.getState().config;
@@ -16,40 +108,7 @@ test('After init returns values from cfg file for all fields', async () => {

// Check if store has cfg contents loaded
const state = store.getState().config;
expect(state).toMatchObject(
expect.objectContaining({
api: {
extraOrigins: expect.stringMatching('https://google.com,https://test.com'),
version: expect.any(String),
},
connectionStatus: {
minigraph: 'PRE_INIT',
upnpStatus: '',
},
local: {
sandbox: expect.any(String),
},
nodeEnv: 'test',
remote: {
accesstoken: '',
allowedOrigins: '',
apikey: '_______________________BIG_API_KEY_HERE_________________________',
avatar: 'https://via.placeholder.com/200',
dynamicRemoteAccessType: 'DISABLED',
email: 'test@example.com',
idtoken: '',
localApiKey: '_______________________LOCAL_API_KEY_HERE_________________________',
refreshtoken: '',
regWizTime: '1611175408732_0951-1653-3509-FBA155FA23C0',
ssoSubIds: '',
upnpEnabled: 'no',
username: 'zspearmint',
wanaccess: 'yes',
wanport: '8443',
},
status: 'LOADED',
})
);
expect(state).toMatchObject(createConfigMatcher());
});

test('updateUserConfig merges in changes to current state', async () => {
@@ -67,37 +126,185 @@ test('updateUserConfig merges in changes to current state', async () => {

const state = store.getState().config;
expect(state).toMatchObject(
expect.objectContaining({
api: {
extraOrigins: expect.stringMatching('https://google.com,https://test.com'),
version: expect.any(String),
},
connectionStatus: {
minigraph: 'PRE_INIT',
upnpStatus: '',
},
local: {
sandbox: expect.any(String),
},
nodeEnv: 'test',
remote: {
accesstoken: '',
allowedOrigins: '',
apikey: '_______________________BIG_API_KEY_HERE_________________________',
createConfigMatcher({
remote: expect.objectContaining({
avatar: 'https://via.placeholder.com/200',
dynamicRemoteAccessType: 'DISABLED',
email: 'test@example.com',
idtoken: '',
localApiKey: '_______________________LOCAL_API_KEY_HERE_________________________',
refreshtoken: '',
regWizTime: '1611175408732_0951-1653-3509-FBA155FA23C0',
ssoSubIds: '',
upnpEnabled: 'no',
username: 'zspearmint',
wanaccess: 'yes',
wanport: '8443',
},
status: 'LOADED',
} as MyServersConfigMemory)
}),
})
);
});

test('loginUser updates state and publishes to pubsub', async () => {
const { loginUser } = await import('@app/store/modules/config.js');
const userInfo = {
email: 'test@example.com',
avatar: 'https://via.placeholder.com/200',
username: 'testuser',
apikey: 'test-api-key',
localApiKey: 'test-local-api-key',
};

await store.dispatch(loginUser(userInfo));

expect(pubsub.publish).toHaveBeenCalledWith(PUBSUB_CHANNEL.OWNER, {
owner: {
username: userInfo.username,
url: '',
avatar: userInfo.avatar,
},
});

const state = store.getState().config;
expect(state).toMatchObject(
createConfigMatcher({
remote: expect.objectContaining(userInfo),
})
);
});

test('logoutUser clears state and publishes to pubsub', async () => {
const { logoutUser } = await import('@app/store/modules/config.js');

await store.dispatch(logoutUser({ reason: 'test logout' }));

expect(pubsub.publish).toHaveBeenCalledWith(PUBSUB_CHANNEL.SERVERS, { servers: [] });
expect(pubsub.publish).toHaveBeenCalledWith(PUBSUB_CHANNEL.OWNER, {
owner: {
username: 'root',
url: '',
avatar: '',
},
});
expect(stopPingTimeoutJobs).toHaveBeenCalled();
expect(GraphQLClient.clearInstance).toHaveBeenCalled();
});

test('updateAccessTokens updates token fields', async () => {
const { updateAccessTokens } = await import('@app/store/modules/config.js');
const tokens = {
accesstoken: 'new-access-token',
refreshtoken: 'new-refresh-token',
idtoken: 'new-id-token',
};

store.dispatch(updateAccessTokens(tokens));

const state = store.getState().config;
expect(state).toMatchObject(
createConfigMatcher({
remote: expect.objectContaining(tokens),
})
);
});

test('updateAllowedOrigins updates extraOrigins', async () => {
const { updateAllowedOrigins } = await import('@app/store/modules/config.js');
const origins = ['https://test1.com', 'https://test2.com'];

store.dispatch(updateAllowedOrigins(origins));

const state = store.getState().config;
expect(state.api.extraOrigins).toBe(origins.join(', '));
});

test('setUpnpState updates upnp settings', async () => {
const { setUpnpState } = await import('@app/store/modules/config.js');

store.dispatch(setUpnpState({ enabled: 'yes', status: 'active' }));

const state = store.getState().config;
expect(state.remote.upnpEnabled).toBe('yes');
expect(state.connectionStatus.upnpStatus).toBe('active');
});

test('setWanPortToValue updates wanport', async () => {
const { setWanPortToValue } = await import('@app/store/modules/config.js');

store.dispatch(setWanPortToValue(8443));

const state = store.getState().config;
expect(state.remote.wanport).toBe('8443');
});

test('setWanAccess updates wanaccess', async () => {
const { setWanAccess } = await import('@app/store/modules/config.js');

store.dispatch(setWanAccess('yes'));

const state = store.getState().config;
expect(state.remote.wanaccess).toBe('yes');
});

test('addSsoUser adds user to ssoSubIds', async () => {
const { addSsoUser } = await import('@app/store/modules/config.js');

store.dispatch(addSsoUser('user1'));
store.dispatch(addSsoUser('user2'));

const state = store.getState().config;
expect(state.remote.ssoSubIds).toBe('user1,user2');
});

test('removeSsoUser removes user from ssoSubIds', async () => {
const { addSsoUser, removeSsoUser } = await import('@app/store/modules/config.js');

store.dispatch(addSsoUser('user1'));
store.dispatch(addSsoUser('user2'));
store.dispatch(removeSsoUser('user1'));

const state = store.getState().config;
expect(state.remote.ssoSubIds).toBe('user2');
});

test('removeSsoUser with null clears all ssoSubIds', async () => {
const { addSsoUser, removeSsoUser } = await import('@app/store/modules/config.js');

store.dispatch(addSsoUser('user1'));
store.dispatch(addSsoUser('user2'));
store.dispatch(removeSsoUser(null));

const state = store.getState().config;
expect(state.remote.ssoSubIds).toBe('');
});

test('setLocalApiKey updates localApiKey', async () => {
const { setLocalApiKey } = await import('@app/store/modules/config.js');

store.dispatch(setLocalApiKey('new-local-api-key'));

const state = store.getState().config;
expect(state.remote.localApiKey).toBe('new-local-api-key');
});

test('setLocalApiKey with null clears localApiKey', async () => {
const { setLocalApiKey } = await import('@app/store/modules/config.js');

store.dispatch(setLocalApiKey(null));

const state = store.getState().config;
expect(state.remote.localApiKey).toBe('');
});

test('setGraphqlConnectionStatus updates minigraph status', async () => {
store.dispatch(setGraphqlConnectionStatus({ status: MinigraphStatus.CONNECTED, error: null }));

const state = store.getState().config;
expect(state.connectionStatus.minigraph).toBe(MinigraphStatus.CONNECTED);
});

test('setupRemoteAccessThunk.fulfilled updates remote access settings', async () => {
const remoteAccessSettings = {
accessType: WAN_ACCESS_TYPE.DYNAMIC,
forwardType: WAN_FORWARD_TYPE.UPNP,
};

await store.dispatch(setupRemoteAccessThunk(remoteAccessSettings));

const state = store.getState().config;
expect(state.remote).toMatchObject({
wanaccess: 'no',
dynamicRemoteAccessType: 'UPNP',
wanport: '',
upnpEnabled: 'yes',
});
});
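The `createConfigMatcher` refactor above is a reusable pattern: `expect.objectContaining` matchers nest, so a helper can pin the stable shape of a state slice once and let each test override only the fields it actually asserts on. A generic sketch with hypothetical names:

```typescript
import { expect } from 'vitest';

// Asserts the invariant shape; callers override only what a given test cares about.
const userMatcher = (overrides: Record<string, unknown> = {}) =>
    expect.objectContaining({
        id: expect.any(String),
        email: expect.stringContaining('@'),
        ...overrides,
    });

// Usage: expect(state.user).toMatchObject(userMatcher({ email: 'test@example.com' }));
```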
@@ -197,253 +197,257 @@ test('After init returns values from cfg file for all fields', async () => {
}
`);
expect(disks).toMatchInlineSnapshot(`
[
{
"comment": null,
"critical": null,
"device": "sdh",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": null,
"fsSize": null,
"fsType": null,
"fsUsed": null,
"id": "ST18000NM000J-2TV103_ZR585CPY",
"idx": 0,
"name": "parity",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 17578328012,
"status": "DISK_OK",
"temp": 25,
"transport": "ata",
"type": "Parity",
"warning": null,
},
{
"comment": "Seagate Exos",
"critical": 75,
"device": "sdf",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": 13882739732,
"fsSize": 17998742753,
"fsType": "xfs",
"fsUsed": 4116003021,
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
"idx": 1,
"name": "disk1",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 17578328012,
"status": "DISK_OK",
"temp": 30,
"transport": "ata",
"type": "Data",
"warning": 50,
},
{
"comment": "",
"critical": null,
"device": "sdj",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": 93140746,
"fsSize": 11998001574,
"fsType": "xfs",
"fsUsed": 11904860828,
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
"idx": 2,
"name": "disk2",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 11718885324,
"status": "DISK_OK",
"temp": 30,
"transport": "ata",
"type": "Data",
"warning": null,
},
{
"comment": "",
"critical": null,
"device": "sde",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": 5519945093,
"fsSize": 11998001574,
"fsType": "xfs",
"fsUsed": 6478056481,
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
"idx": 3,
"name": "disk3",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 11718885324,
"status": "DISK_OK",
"temp": 30,
"transport": "ata",
"type": "Data",
"warning": null,
},
{
"comment": "",
"critical": null,
"device": "sdi",
"exportable": false,
"format": "MBR: 4KiB-aligned",
"fsFree": 111810683,
"fsSize": 250059317,
"fsType": "btrfs",
"fsUsed": 137273827,
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
"idx": 30,
"name": "cache",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": false,
"size": 244198552,
"status": "DISK_OK",
"temp": 22,
"transport": "ata",
"type": "Cache",
"warning": null,
},
{
"comment": null,
"critical": null,
"device": "nvme0n1",
"exportable": false,
"format": "MBR: 4KiB-aligned",
"fsFree": null,
"fsSize": null,
"fsType": null,
"fsUsed": null,
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
"idx": 31,
"name": "cache2",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": false,
"size": 244198552,
"status": "DISK_OK",
"temp": 27,
"transport": "nvme",
"type": "Cache",
"warning": null,
},
{
"comment": "Unraid OS boot device",
"critical": null,
"device": "sda",
"exportable": true,
"format": "unknown",
"fsFree": 3191407,
"fsSize": 4042732,
"fsType": "vfat",
"fsUsed": 851325,
"id": "Cruzer",
"idx": 32,
"name": "flash",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 3956700,
"status": "DISK_OK",
"temp": null,
"transport": "usb",
"type": "Flash",
"warning": null,
},
]
`);
[
{
"comment": null,
"critical": null,
"device": "sdh",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": null,
"fsSize": null,
"fsType": null,
"fsUsed": null,
"id": "ST18000NM000J-2TV103_ZR585CPY",
"idx": 0,
"name": "parity",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 17578328012,
"status": "DISK_OK",
"temp": 25,
"transport": "ata",
"type": "PARITY",
"warning": null,
},
{
"comment": "Seagate Exos",
"critical": 75,
"device": "sdf",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": 13882739732,
"fsSize": 17998742753,
"fsType": "xfs",
"fsUsed": 4116003021,
"id": "ST18000NM000J-2TV103_ZR5B1W9X",
"idx": 1,
"name": "disk1",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 17578328012,
"status": "DISK_OK",
"temp": 30,
"transport": "ata",
"type": "DATA",
"warning": 50,
},
{
"comment": "",
"critical": null,
"device": "sdj",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": 93140746,
"fsSize": 11998001574,
"fsType": "xfs",
"fsUsed": 11904860828,
"id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
"idx": 2,
"name": "disk2",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 11718885324,
"status": "DISK_OK",
"temp": 30,
"transport": "ata",
"type": "DATA",
"warning": null,
},
{
"comment": "",
"critical": null,
"device": "sde",
"exportable": false,
"format": "GPT: 4KiB-aligned",
"fsFree": 5519945093,
"fsSize": 11998001574,
"fsType": "xfs",
"fsUsed": 6478056481,
"id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
"idx": 3,
"name": "disk3",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 11718885324,
"status": "DISK_OK",
"temp": 30,
"transport": "ata",
"type": "DATA",
"warning": null,
},
{
"comment": "",
"critical": null,
"device": "sdi",
"exportable": false,
"format": "MBR: 4KiB-aligned",
"fsFree": 111810683,
"fsSize": 250059317,
"fsType": "btrfs",
"fsUsed": 137273827,
"id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
"idx": 30,
"name": "cache",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": false,
"size": 244198552,
"status": "DISK_OK",
"temp": 22,
"transport": "ata",
"type": "CACHE",
"warning": null,
},
{
"comment": null,
"critical": null,
"device": "nvme0n1",
"exportable": false,
"format": "MBR: 4KiB-aligned",
"fsFree": null,
"fsSize": null,
"fsType": null,
"fsUsed": null,
"id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
"idx": 31,
"name": "cache2",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": false,
"size": 244198552,
"status": "DISK_OK",
"temp": 27,
"transport": "nvme",
"type": "CACHE",
"warning": null,
},
{
"comment": "Unraid OS boot device",
"critical": null,
"device": "sda",
"exportable": true,
"format": "unknown",
"fsFree": 3191407,
"fsSize": 4042732,
"fsType": "vfat",
"fsUsed": 851325,
"id": "Cruzer",
"idx": 32,
"name": "flash",
"numErrors": 0,
"numReads": 0,
"numWrites": 0,
"rotational": true,
"size": 3956700,
"status": "DISK_OK",
"temp": null,
"transport": "usb",
"type": "FLASH",
"warning": null,
},
]
`);
expect(shares).toMatchInlineSnapshot(`
[
{
"allocator": "highwater",
"cache": false,
"cachePool": "cache",
"color": "yellow-on",
"comment": "",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "appdata",
"nameOrig": "appdata",
"size": 0,
"splitLevel": "",
"used": 33619300,
},
{
"allocator": "highwater",
"cache": false,
"cachePool": "cache",
"color": "yellow-on",
"comment": "saved VM instances",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "domains",
"nameOrig": "domains",
"size": 0,
"splitLevel": "1",
"used": 33619300,
},
{
"allocator": "highwater",
"cache": true,
"cachePool": "cache",
"color": "yellow-on",
"comment": "ISO images",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "isos",
"nameOrig": "isos",
"size": 0,
"splitLevel": "",
"used": 33619300,
},
{
"allocator": "highwater",
"cache": false,
"cachePool": "cache",
"color": "yellow-on",
"comment": "system data",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"include": [],
"luksStatus": "0",
"name": "system",
"nameOrig": "system",
"size": 0,
"splitLevel": "1",
"used": 33619300,
},
]
`);
[
{
"allocator": "highwater",
"cache": false,
"cachePool": "cache",
"color": "yellow-on",
"comment": "",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "appdata",
"include": [],
"luksStatus": "0",
"name": "appdata",
"nameOrig": "appdata",
"size": 0,
"splitLevel": "",
"used": 33619300,
},
{
"allocator": "highwater",
"cache": false,
"cachePool": "cache",
"color": "yellow-on",
"comment": "saved VM instances",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "domains",
"include": [],
"luksStatus": "0",
"name": "domains",
"nameOrig": "domains",
"size": 0,
"splitLevel": "1",
"used": 33619300,
},
{
"allocator": "highwater",
"cache": true,
"cachePool": "cache",
"color": "yellow-on",
"comment": "ISO images",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "isos",
"include": [],
"luksStatus": "0",
"name": "isos",
"nameOrig": "isos",
"size": 0,
"splitLevel": "",
"used": 33619300,
},
{
"allocator": "highwater",
"cache": false,
"cachePool": "cache",
"color": "yellow-on",
"comment": "system data",
"cow": "auto",
"exclude": [],
"floor": "0",
"free": 9309372,
"id": "system",
"include": [],
"luksStatus": "0",
"name": "system",
"nameOrig": "system",
"size": 0,
"splitLevel": "1",
"used": 33619300,
},
]
`);
expect(nfsShares).toMatchInlineSnapshot(`
[
{
@@ -957,7 +961,7 @@ test('After init returns values from cfg file for all fields', async () => {
"configErrorState": "INELIGIBLE",
"configValid": false,
"csrfToken": "0000000000000000",
"defaultFsType": "xfs",
"defaultFsType": "XFS",
"deviceCount": 4,
"domain": "",
"domainLogin": "Administrator",
@@ -5,30 +5,31 @@ import { store } from '@app/store/index.js';
test('Returns paths', async () => {
const { paths } = store.getState();
expect(Object.keys(paths)).toMatchInlineSnapshot(`
[
"core",
"unraid-api-base",
"unraid-data",
"docker-autostart",
"docker-socket",
"parity-checks",
"htpasswd",
"emhttpd-socket",
"states",
"dynamix-base",
"dynamix-config",
"myservers-base",
"myservers-config",
"myservers-config-states",
"myservers-env",
"myservers-keepalive",
"keyfile-base",
"machine-id",
"log-base",
"unraid-log-base",
"var-run",
"auth-sessions",
"auth-keys",
]
`);
[
"core",
"unraid-api-base",
"unraid-data",
"docker-autostart",
"docker-socket",
"parity-checks",
"htpasswd",
"emhttpd-socket",
"states",
"dynamix-base",
"dynamix-config",
"myservers-base",
"myservers-config",
"myservers-config-states",
"myservers-env",
"myservers-keepalive",
"keyfile-base",
"machine-id",
"log-base",
"unraid-log-base",
"var-run",
"auth-sessions",
"auth-keys",
"libvirt-pid",
]
`);
});
@@ -1,13 +1,13 @@
import { expect, test } from 'vitest';

import { store } from '@app/store/index.js';
import { getters, store } from '@app/store/index.js';
import { loadRegistrationKey } from '@app/store/modules/registration.js';
import { FileLoadStatus, StateFileKey } from '@app/store/types.js';

// Preloading imports for faster tests

test('Before loading key returns null', async () => {
const { status, keyFile } = store.getState().registration;
const { status, keyFile } = getters.registration();
expect(status).toBe(FileLoadStatus.UNLOADED);
expect(keyFile).toBe(null);
});
@@ -17,7 +17,7 @@ test('Requires emhttp to be loaded to find key file', async () => {
await store.dispatch(loadRegistrationKey());

// Check if store has state files loaded
const { status, keyFile } = store.getState().registration;
const { status, keyFile } = getters.registration();

expect(status).toBe(FileLoadStatus.LOADED);
expect(keyFile).toBe(null);
@@ -42,7 +42,7 @@ test('Returns empty key if key location is empty', async () => {
await store.dispatch(loadRegistrationKey());

// Check if store has state files loaded
const { status, keyFile } = store.getState().registration;
const { status, keyFile } = getters.registration();
expect(status).toBe(FileLoadStatus.LOADED);
expect(keyFile).toBe('');
});
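The move from `store.getState().registration` to `getters.registration()` above suggests a small accessor layer over the store. Its implementation is not part of this diff; a hypothetical sketch of the shape such a helper usually takes:

```typescript
// Hypothetical sketch only; the real getters live in @app/store/index.js.
import { store } from '@app/store/index.js';

export const getters = {
    registration: () => store.getState().registration,
    config: () => store.getState().config,
};
```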
@@ -15,79 +15,83 @@ test('Returns parsed state file', async () => {
|
||||
type: 'ini',
|
||||
});
|
||||
expect(parse(stateFile)).toMatchInlineSnapshot(`
|
||||
[
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "appdata",
|
||||
"nameOrig": "appdata",
|
||||
"size": 0,
|
||||
"splitLevel": "",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "saved VM instances",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "domains",
|
||||
"nameOrig": "domains",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": true,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "ISO images",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "isos",
|
||||
"nameOrig": "isos",
|
||||
"size": 0,
|
||||
"splitLevel": "",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system",
|
||||
"nameOrig": "system",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
]
|
||||
`);
|
||||
[
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "appdata",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "appdata",
|
||||
"nameOrig": "appdata",
|
||||
"size": 0,
|
||||
"splitLevel": "",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "saved VM instances",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "domains",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "domains",
|
||||
"nameOrig": "domains",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": true,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "ISO images",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "isos",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "isos",
|
||||
"nameOrig": "isos",
|
||||
"size": 0,
|
||||
"splitLevel": "",
|
||||
"used": 33619300,
|
||||
},
|
||||
{
|
||||
"allocator": "highwater",
|
||||
"cache": false,
|
||||
"cachePool": "cache",
|
||||
"color": "yellow-on",
|
||||
"comment": "system data",
|
||||
"cow": "auto",
|
||||
"exclude": [],
|
||||
"floor": "0",
|
||||
"free": 9309372,
|
||||
"id": "system",
|
||||
"include": [],
|
||||
"luksStatus": "0",
|
||||
"name": "system",
|
||||
"nameOrig": "system",
|
||||
"size": 0,
|
||||
"splitLevel": "1",
|
||||
"used": 33619300,
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
@@ -15,175 +15,175 @@ test('Returns parsed state file', async () => {
        type: 'ini',
    });
    expect(parse(stateFile)).toMatchInlineSnapshot(`
-      [
-        {
-          "comment": null,
-          "critical": null,
-          "device": "sdh",
-          "exportable": false,
-          "format": "GPT: 4KiB-aligned",
-          "fsFree": null,
-          "fsSize": null,
-          "fsType": null,
-          "fsUsed": null,
-          "id": "ST18000NM000J-2TV103_ZR585CPY",
-          "idx": 0,
-          "name": "parity",
-          "numErrors": 0,
-          "numReads": 0,
-          "numWrites": 0,
-          "rotational": true,
-          "size": 17578328012,
-          "status": "DISK_OK",
-          "temp": 25,
-          "transport": "ata",
-          "type": "Parity",
-          "warning": null,
-        },
-        {
-          "comment": "Seagate Exos",
-          "critical": 75,
-          "device": "sdf",
-          "exportable": false,
-          "format": "GPT: 4KiB-aligned",
-          "fsFree": 13882739732,
-          "fsSize": 17998742753,
-          "fsType": "xfs",
-          "fsUsed": 4116003021,
-          "id": "ST18000NM000J-2TV103_ZR5B1W9X",
-          "idx": 1,
-          "name": "disk1",
-          "numErrors": 0,
-          "numReads": 0,
-          "numWrites": 0,
-          "rotational": true,
-          "size": 17578328012,
-          "status": "DISK_OK",
-          "temp": 30,
-          "transport": "ata",
-          "type": "Data",
-          "warning": 50,
-        },
-        {
-          "comment": "",
-          "critical": null,
-          "device": "sdj",
-          "exportable": false,
-          "format": "GPT: 4KiB-aligned",
-          "fsFree": 93140746,
-          "fsSize": 11998001574,
-          "fsType": "xfs",
-          "fsUsed": 11904860828,
-          "id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
-          "idx": 2,
-          "name": "disk2",
-          "numErrors": 0,
-          "numReads": 0,
-          "numWrites": 0,
-          "rotational": true,
-          "size": 11718885324,
-          "status": "DISK_OK",
-          "temp": 30,
-          "transport": "ata",
-          "type": "Data",
-          "warning": null,
-        },
-        {
-          "comment": "",
-          "critical": null,
-          "device": "sde",
-          "exportable": false,
-          "format": "GPT: 4KiB-aligned",
-          "fsFree": 5519945093,
-          "fsSize": 11998001574,
-          "fsType": "xfs",
-          "fsUsed": 6478056481,
-          "id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
-          "idx": 3,
-          "name": "disk3",
-          "numErrors": 0,
-          "numReads": 0,
-          "numWrites": 0,
-          "rotational": true,
-          "size": 11718885324,
-          "status": "DISK_OK",
-          "temp": 30,
-          "transport": "ata",
-          "type": "Data",
-          "warning": null,
-        },
-        {
-          "comment": "",
-          "critical": null,
-          "device": "sdi",
-          "exportable": false,
-          "format": "MBR: 4KiB-aligned",
-          "fsFree": 111810683,
-          "fsSize": 250059317,
-          "fsType": "btrfs",
-          "fsUsed": 137273827,
-          "id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
-          "idx": 30,
-          "name": "cache",
-          "numErrors": 0,
-          "numReads": 0,
-          "numWrites": 0,
-          "rotational": false,
-          "size": 244198552,
-          "status": "DISK_OK",
-          "temp": 22,
-          "transport": "ata",
-          "type": "Cache",
-          "warning": null,
-        },
-        {
-          "comment": null,
-          "critical": null,
-          "device": "nvme0n1",
-          "exportable": false,
-          "format": "MBR: 4KiB-aligned",
-          "fsFree": null,
-          "fsSize": null,
-          "fsType": null,
-          "fsUsed": null,
-          "id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
-          "idx": 31,
-          "name": "cache2",
-          "numErrors": 0,
-          "numReads": 0,
-          "numWrites": 0,
-          "rotational": false,
-          "size": 244198552,
-          "status": "DISK_OK",
-          "temp": 27,
-          "transport": "nvme",
-          "type": "Cache",
-          "warning": null,
-        },
-        {
-          "comment": "Unraid OS boot device",
-          "critical": null,
-          "device": "sda",
-          "exportable": true,
-          "format": "unknown",
-          "fsFree": 3191407,
-          "fsSize": 4042732,
-          "fsType": "vfat",
-          "fsUsed": 851325,
-          "id": "Cruzer",
-          "idx": 32,
-          "name": "flash",
-          "numErrors": 0,
-          "numReads": 0,
-          "numWrites": 0,
-          "rotational": true,
-          "size": 3956700,
-          "status": "DISK_OK",
-          "temp": null,
-          "transport": "usb",
-          "type": "Flash",
-          "warning": null,
-        },
-      ]
-    `);
+      [
+        {
+          "comment": null,
+          "critical": null,
+          "device": "sdh",
+          "exportable": false,
+          "format": "GPT: 4KiB-aligned",
+          "fsFree": null,
+          "fsSize": null,
+          "fsType": null,
+          "fsUsed": null,
+          "id": "ST18000NM000J-2TV103_ZR585CPY",
+          "idx": 0,
+          "name": "parity",
+          "numErrors": 0,
+          "numReads": 0,
+          "numWrites": 0,
+          "rotational": true,
+          "size": 17578328012,
+          "status": "DISK_OK",
+          "temp": 25,
+          "transport": "ata",
+          "type": "PARITY",
+          "warning": null,
+        },
+        {
+          "comment": "Seagate Exos",
+          "critical": 75,
+          "device": "sdf",
+          "exportable": false,
+          "format": "GPT: 4KiB-aligned",
+          "fsFree": 13882739732,
+          "fsSize": 17998742753,
+          "fsType": "xfs",
+          "fsUsed": 4116003021,
+          "id": "ST18000NM000J-2TV103_ZR5B1W9X",
+          "idx": 1,
+          "name": "disk1",
+          "numErrors": 0,
+          "numReads": 0,
+          "numWrites": 0,
+          "rotational": true,
+          "size": 17578328012,
+          "status": "DISK_OK",
+          "temp": 30,
+          "transport": "ata",
+          "type": "DATA",
+          "warning": 50,
+        },
+        {
+          "comment": "",
+          "critical": null,
+          "device": "sdj",
+          "exportable": false,
+          "format": "GPT: 4KiB-aligned",
+          "fsFree": 93140746,
+          "fsSize": 11998001574,
+          "fsType": "xfs",
+          "fsUsed": 11904860828,
+          "id": "WDC_WD120EDAZ-11F3RA0_5PJRD45C",
+          "idx": 2,
+          "name": "disk2",
+          "numErrors": 0,
+          "numReads": 0,
+          "numWrites": 0,
+          "rotational": true,
+          "size": 11718885324,
+          "status": "DISK_OK",
+          "temp": 30,
+          "transport": "ata",
+          "type": "DATA",
+          "warning": null,
+        },
+        {
+          "comment": "",
+          "critical": null,
+          "device": "sde",
+          "exportable": false,
+          "format": "GPT: 4KiB-aligned",
+          "fsFree": 5519945093,
+          "fsSize": 11998001574,
+          "fsType": "xfs",
+          "fsUsed": 6478056481,
+          "id": "WDC_WD120EMAZ-11BLFA0_5PH8BTYD",
+          "idx": 3,
+          "name": "disk3",
+          "numErrors": 0,
+          "numReads": 0,
+          "numWrites": 0,
+          "rotational": true,
+          "size": 11718885324,
+          "status": "DISK_OK",
+          "temp": 30,
+          "transport": "ata",
+          "type": "DATA",
+          "warning": null,
+        },
+        {
+          "comment": "",
+          "critical": null,
+          "device": "sdi",
+          "exportable": false,
+          "format": "MBR: 4KiB-aligned",
+          "fsFree": 111810683,
+          "fsSize": 250059317,
+          "fsType": "btrfs",
+          "fsUsed": 137273827,
+          "id": "Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z",
+          "idx": 30,
+          "name": "cache",
+          "numErrors": 0,
+          "numReads": 0,
+          "numWrites": 0,
+          "rotational": false,
+          "size": 244198552,
+          "status": "DISK_OK",
+          "temp": 22,
+          "transport": "ata",
+          "type": "CACHE",
+          "warning": null,
+        },
+        {
+          "comment": null,
+          "critical": null,
+          "device": "nvme0n1",
+          "exportable": false,
+          "format": "MBR: 4KiB-aligned",
+          "fsFree": null,
+          "fsSize": null,
+          "fsType": null,
+          "fsUsed": null,
+          "id": "KINGSTON_SA2000M8250G_50026B7282669D9E",
+          "idx": 31,
+          "name": "cache2",
+          "numErrors": 0,
+          "numReads": 0,
+          "numWrites": 0,
+          "rotational": false,
+          "size": 244198552,
+          "status": "DISK_OK",
+          "temp": 27,
+          "transport": "nvme",
+          "type": "CACHE",
+          "warning": null,
+        },
+        {
+          "comment": "Unraid OS boot device",
+          "critical": null,
+          "device": "sda",
+          "exportable": true,
+          "format": "unknown",
+          "fsFree": 3191407,
+          "fsSize": 4042732,
+          "fsType": "vfat",
+          "fsUsed": 851325,
+          "id": "Cruzer",
+          "idx": 32,
+          "name": "flash",
+          "numErrors": 0,
+          "numReads": 0,
+          "numWrites": 0,
+          "rotational": true,
+          "size": 3956700,
+          "status": "DISK_OK",
+          "temp": null,
+          "transport": "usb",
+          "type": "FLASH",
+          "warning": null,
+        },
+      ]
+    `);
});

@@ -24,7 +24,7 @@ test('Returns parsed state file', async () => {
          "configErrorState": "INELIGIBLE",
          "configValid": false,
          "csrfToken": "0000000000000000",
-         "defaultFsType": "xfs",
+         "defaultFsType": "XFS",
          "deviceCount": 4,
          "domain": "",
          "domainLogin": "Administrator",

@@ -20,16 +20,7 @@ const getUnraidApiLocation = async () => {
};

try {
-    // Register plugins and create a dynamic module configuration
-    const dynamicModule = await CliModule.registerWithPlugins();
-
-    // Create a new class that extends CliModule with the dynamic configuration
-    const DynamicCliModule = class extends CliModule {
-        static module = dynamicModule.module;
-        static imports = dynamicModule.imports;
-        static providers = dynamicModule.providers;
-    };
-    await CommandFactory.run(DynamicCliModule, {
+    await CommandFactory.run(CliModule, {
        cliName: 'unraid-api',
        logger: LOG_LEVEL === 'TRACE' ? new LogService() : false, // - enable this to see nest initialization issues
        completion: {

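This hunk drops the dynamically composed `DynamicCliModule` and hands `CliModule` straight to nest-commander's `CommandFactory`. For reference, a minimal sketch of that entrypoint shape — the module and service import paths are assumed, and the `completion` options are elided:

```ts
import { CommandFactory } from 'nest-commander';

import { CliModule } from '@app/unraid-api/cli/cli.module.js'; // assumed module path
import { LogService } from '@app/unraid-api/cli/log.service.js'; // assumed module path

const LOG_LEVEL = process.env.LOG_LEVEL ?? 'INFO';

async function bootstrap(): Promise<void> {
    await CommandFactory.run(CliModule, {
        cliName: 'unraid-api',
        // Silence Nest's bootstrap logging unless tracing is requested
        logger: LOG_LEVEL === 'TRACE' ? new LogService() : false,
    });
}

bootstrap();
```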
@@ -20,6 +20,7 @@ const getAllowedSocks = (): string[] => [

const getLocalAccessUrlsForServer = (state: RootState = store.getState()): string[] => {
    const { emhttp } = state;

    if (emhttp.status !== FileLoadStatus.LOADED) {
        return [];
    }

@@ -90,7 +91,7 @@ const getApolloSandbox = (): string[] => {
export const getAllowedOrigins = (state: RootState = store.getState()): string[] =>
    uniq([
        ...getAllowedSocks(),
-        ...getLocalAccessUrlsForServer(),
+        ...getLocalAccessUrlsForServer(state),
        ...getRemoteAccessUrlsForAllowedOrigins(state),
        ...getExtraOrigins(),
        ...getConnectOrigins(),

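The second hunk threads the caller's `state` through to `getLocalAccessUrlsForServer`, so the full origin list can be derived from one explicit `RootState` instead of repeated live-store reads. A sketch of what that enables — the module path and the `RootState` export location are assumptions:

```ts
import type { RootState } from '@app/store/index.js'; // assumed export location
import { getAllowedOrigins } from '@app/core/utils/allowed-origins.js'; // assumed module path

// With no argument the live store is read, as before
const live = getAllowedOrigins();

// Tests can now pass a fixture state and get a fully deterministic origin list
declare const fixtureState: RootState; // stands in for a mocked state fixture
const deterministic = getAllowedOrigins(fixtureState);
```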
@@ -1,49 +0,0 @@
-import { ArrayRunningError } from '@app/core/errors/array-running-error.js';
-import { FieldMissingError } from '@app/core/errors/field-missing-error.js';
-import { getArrayData } from '@app/core/modules/array/get-array-data.js';
-import { type CoreContext, type CoreResult } from '@app/core/types/index.js';
-import { arrayIsRunning } from '@app/core/utils/array/array-is-running.js';
-import { emcmd } from '@app/core/utils/clients/emcmd.js';
-import { ensurePermission } from '@app/core/utils/permissions/ensure-permission.js';
-import { hasFields } from '@app/core/utils/validation/has-fields.js';
-
-/**
- * Add a disk to the array.
- */
-export const addDiskToArray = async function (context: CoreContext): Promise<CoreResult> {
-    const { data = {}, user } = context;
-
-    // Check permissions
-    ensurePermission(user, {
-        resource: 'array',
-        action: 'create',
-        possession: 'any',
-    });
-
-    const missingFields = hasFields(data, ['id']);
-    if (missingFields.length !== 0) {
-        // Just log first error
-        throw new FieldMissingError(missingFields[0]);
-    }
-
-    if (arrayIsRunning()) {
-        throw new ArrayRunningError();
-    }
-
-    const { id: diskId, slot: preferredSlot } = data;
-    const slot = Number.parseInt(preferredSlot as string, 10);
-
-    // Add disk
-    await emcmd({
-        changeDevice: 'apply',
-        [`slotId.${slot}`]: diskId,
-    });
-
-    const array = getArrayData();
-
-    // Disk added successfully
-    return {
-        text: `Disk was added to the array in slot ${slot}.`,
-        json: array,
-    };
-};

@@ -1,12 +1,16 @@
import { GraphQLError } from 'graphql';
import { sum } from 'lodash-es';

-import type { ArrayCapacity, ArrayType } from '@app/graphql/generated/api/types.js';
-import { ArrayDiskType } from '@app/graphql/generated/api/types.js';
import { store } from '@app/store/index.js';
import { FileLoadStatus } from '@app/store/types.js';
+import {
+    ArrayCapacity,
+    ArrayDiskType,
+    ArrayState,
+    UnraidArray,
+} from '@app/unraid-api/graph/resolvers/array/array.model.js';

-export const getArrayData = (getState = store.getState): ArrayType => {
+export const getArrayData = (getState = store.getState): UnraidArray => {
    // Var state isn't loaded
    const state = getState();
    if (

@@ -51,7 +55,7 @@ export const getArrayData = (getState = store.getState): ArrayType => {

    return {
        id: 'array',
-        state: emhttp.var.mdState,
+        state: emhttp.var.mdState as ArrayState,
        capacity,
        boot,
        parities,

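After this change `getArrayData` returns the Nest-side `UnraidArray` model, with the raw emhttp `mdState` string cast to the `ArrayState` enum. A minimal sketch of a caller relying on that typing (import paths taken from the hunks above; the helper itself is illustrative):

```ts
import { getArrayData } from '@app/core/modules/array/get-array-data.js';
import {
    ArrayState,
    UnraidArray,
} from '@app/unraid-api/graph/resolvers/array/array.model.js';

export const isArrayStarted = (): boolean => {
    // The cast in getArrayData means `state` is an ArrayState, not a bare string
    const array: UnraidArray = getArrayData();
    return array.state === ArrayState.STARTED;
};
```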
@@ -1,6 +0,0 @@
-// Created from 'create-ts-index'
-
-export * from './add-disk-to-array.js';
-export * from './remove-disk-from-array.js';
-export * from './update-array.js';
-export * from './update-parity-check.js';

@@ -1,45 +0,0 @@
-import { ArrayRunningError } from '@app/core/errors/array-running-error.js';
-import { FieldMissingError } from '@app/core/errors/field-missing-error.js';
-import { getArrayData } from '@app/core/modules/array/get-array-data.js';
-import { type CoreContext, type CoreResult } from '@app/core/types/index.js';
-import { arrayIsRunning } from '@app/core/utils/array/array-is-running.js';
-import { hasFields } from '@app/core/utils/validation/has-fields.js';
-
-interface Context extends CoreContext {
-    data: {
-        /** The slot the disk is in. */
-        slot: string;
-    };
-}
-
-/**
- * Remove a disk from the array.
- * @returns The updated array.
- */
-export const removeDiskFromArray = async (context: Context): Promise<CoreResult> => {
-    const { data } = context;
-    const missingFields = hasFields(data, ['id']);
-
-    if (missingFields.length !== 0) {
-        // Only log first error
-        throw new FieldMissingError(missingFields[0]);
-    }
-
-    if (arrayIsRunning()) {
-        throw new ArrayRunningError();
-    }
-
-    const { slot } = data;
-
-    // Error removing disk
-    // if () {
-    // }
-
-    const array = getArrayData();
-
-    // Disk removed successfully
-    return {
-        text: `Disk was removed from the array in slot ${slot}.`,
-        json: array,
-    };
-};

@@ -1,88 +0,0 @@
-import type { CoreContext, CoreResult } from '@app/core/types/index.js';
-import { AppError } from '@app/core/errors/app-error.js';
-import { FieldMissingError } from '@app/core/errors/field-missing-error.js';
-import { ParamInvalidError } from '@app/core/errors/param-invalid-error.js';
-import { getArrayData } from '@app/core/modules/array/get-array-data.js';
-import { arrayIsRunning } from '@app/core/utils/array/array-is-running.js';
-import { emcmd } from '@app/core/utils/clients/emcmd.js';
-import { uppercaseFirstChar } from '@app/core/utils/misc/uppercase-first-char.js';
-import { ensurePermission } from '@app/core/utils/permissions/ensure-permission.js';
-import { hasFields } from '@app/core/utils/validation/has-fields.js';
-
-// @TODO: Fix this not working across node apps
-// each app has it's own lock since the var is scoped
-// ideally this should have a timeout to prevent it sticking
-let locked = false;
-
-export const updateArray = async (context: CoreContext): Promise<CoreResult> => {
-    const { data = {}, user } = context;
-
-    // Check permissions
-    ensurePermission(user, {
-        resource: 'array',
-        action: 'update',
-        possession: 'any',
-    });
-
-    const missingFields = hasFields(data, ['state']);
-
-    if (missingFields.length !== 0) {
-        // Only log first error
-        throw new FieldMissingError(missingFields[0]);
-    }
-
-    const { state: nextState } = data as { state: string };
-    const startState = arrayIsRunning() ? 'started' : 'stopped';
-    const pendingState = nextState === 'stop' ? 'stopping' : 'starting';
-
-    if (!['start', 'stop'].includes(nextState)) {
-        throw new ParamInvalidError('state', nextState);
-    }
-
-    // Prevent this running multiple times at once
-    if (locked) {
-        throw new AppError('Array state is still being updated.');
-    }
-
-    // Prevent starting/stopping array when it's already in the same state
-    if ((arrayIsRunning() && nextState === 'start') || (!arrayIsRunning() && nextState === 'stop')) {
-        throw new AppError(`The array is already ${startState}`);
-    }
-
-    // Set lock then start/stop array
-    locked = true;
-    const command = {
-        [`cmd${uppercaseFirstChar(nextState)}`]: uppercaseFirstChar(nextState),
-        startState: startState.toUpperCase(),
-    };
-
-    // `await` has to be used otherwise the catch
-    // will finish after the return statement below
-    await emcmd(command).finally(() => {
-        locked = false;
-    });
-
-    // Get new array JSON
-    const array = getArrayData();
-
-    /**
-     * Update array details
-     *
-     * @memberof Core
-     * @module array/update-array
-     * @param {Core~Context} context Context object.
-     * @param {Object} context.data The data object.
-     * @param {'start'|'stop'} context.data.state If the array should be started or stopped.
-     * @param {State~User} context.user The current user.
-     * @returns {Core~Result} The updated array.
-     */
-    return {
-        text: `Array was ${startState}, ${pendingState}.`,
-        json: {
-            ...array,
-            state: nextState === 'start' ? 'started' : 'stopped',
-            previousState: startState,
-            pendingState,
-        },
-    };
-};

@@ -1,78 +0,0 @@
-import type { CoreContext, CoreResult } from '@app/core/types/index.js';
-import { FieldMissingError } from '@app/core/errors/field-missing-error.js';
-import { ParamInvalidError } from '@app/core/errors/param-invalid-error.js';
-import { emcmd } from '@app/core/utils/clients/emcmd.js';
-import { ensurePermission } from '@app/core/utils/permissions/ensure-permission.js';
-import { getters } from '@app/store/index.js';
-
-type State = 'start' | 'cancel' | 'resume' | 'cancel';
-
-interface Context extends CoreContext {
-    data: {
-        state?: State;
-        correct?: boolean;
-    };
-}
-
-/**
- * Remove a disk from the array.
- * @returns The update array.
- */
-export const updateParityCheck = async (context: Context): Promise<CoreResult> => {
-    const { user, data } = context;
-
-    // Check permissions
-    ensurePermission(user, {
-        resource: 'array',
-        action: 'update',
-        possession: 'any',
-    });
-
-    // Validation
-    if (!data.state) {
-        throw new FieldMissingError('state');
-    }
-
-    const { state: wantedState } = data;
-    const emhttp = getters.emhttp();
-    const running = emhttp.var.mdResync !== 0;
-    const states = {
-        pause: {
-            cmdNoCheck: 'Pause',
-        },
-        resume: {
-            cmdCheck: 'Resume',
-        },
-        cancel: {
-            cmdNoCheck: 'Cancel',
-        },
-        start: {
-            cmdCheck: 'Check',
-        },
-    };
-
-    let allowedStates = Object.keys(states);
-
-    // Only allow starting a check if there isn't already one running
-    if (running) {
-        allowedStates = allowedStates.splice(allowedStates.indexOf('start'), 1);
-    }
-
-    // Only allow states from states object
-    if (!allowedStates.includes(wantedState)) {
-        throw new ParamInvalidError('state', wantedState);
-    }
-
-    // Should we write correction to the parity during the check
-    const writeCorrectionsToParity = wantedState === 'start' && data.correct;
-
-    await emcmd({
-        startState: 'STARTED',
-        ...states[wantedState],
-        ...(writeCorrectionsToParity ? { optionCorrect: 'correct' } : {}),
-    });
-
-    return {
-        json: {},
-    };
-};

@@ -1,6 +1,6 @@
import { AppError } from '@app/core/errors/app-error.js';
import { type CoreContext, type CoreResult } from '@app/core/types/index.js';
-import { Disk } from '@app/graphql/generated/api/types.js';
+import { ArrayDisk } from '@app/unraid-api/graph/resolvers/array/array.model.js';

interface Context extends CoreContext {
    params: {

@@ -11,7 +11,7 @@ interface Context extends CoreContext {
/**
 * Get a single disk.
 */
-export const getDisk = async (context: Context, Disks: Disk[]): Promise<CoreResult> => {
+export const getDisk = async (context: Context, Disks: ArrayDisk[]): Promise<CoreResult> => {
    const { params } = context;

    const { id } = params;

@@ -1,90 +0,0 @@
-import fs from 'fs';
-
-import camelCaseKeys from 'camelcase-keys';
-
-import type { ContainerPort, Docker, DockerContainer } from '@app/graphql/generated/api/types.js';
-import { dockerLogger } from '@app/core/log.js';
-import { docker } from '@app/core/utils/clients/docker.js';
-import { catchHandlers } from '@app/core/utils/misc/catch-handlers.js';
-import { ContainerPortType, ContainerState } from '@app/graphql/generated/api/types.js';
-import { getters, store } from '@app/store/index.js';
-import { updateDockerState } from '@app/store/modules/docker.js';
-
-export interface ContainerListingOptions {
-    useCache?: boolean;
-}
-
-/**
- * Get all Docker containers.
- * @returns All the in/active Docker containers on the system.
- */
-export const getDockerContainers = async (
-    { useCache }: ContainerListingOptions = { useCache: true }
-): Promise<Array<DockerContainer>> => {
-    const dockerState = getters.docker();
-    if (useCache && dockerState.containers) {
-        dockerLogger.trace('Using docker container cache');
-        return dockerState.containers;
-    }
-
-    dockerLogger.trace('Skipping docker container cache');
-
-    /**
-     * Docker auto start file
-     *
-     * @note Doesn't exist if array is offline.
-     * @see https://github.com/limetech/webgui/issues/502#issue-480992547
-     */
-    const autoStartFile = await fs.promises
-        .readFile(getters.paths()['docker-autostart'], 'utf8')
-        .then((file) => file.toString())
-        .catch(() => '');
-    const autoStarts = autoStartFile.split('\n');
-    const rawContainers = await docker
-        .listContainers({
-            all: true,
-            size: true,
-        })
-        // If docker throws an error return no containers
-        .catch(catchHandlers.docker);
-
-    // Cleanup container object
-    const containers: Array<DockerContainer> = rawContainers.map((container) => {
-        const names = container.Names[0];
-        const containerData: DockerContainer = camelCaseKeys<DockerContainer>(
-            {
-                labels: container.Labels ?? {},
-                sizeRootFs: undefined,
-                imageId: container.ImageID,
-                state:
-                    typeof container.State === 'string'
-                        ? (ContainerState[container.State.toUpperCase()] ?? ContainerState.EXITED)
-                        : ContainerState.EXITED,
-                autoStart: autoStarts.includes(names.split('/')[1]),
-                ports: container.Ports.map<ContainerPort>((port) => ({
-                    ...port,
-                    type: ContainerPortType[port.Type.toUpperCase()],
-                })),
-                command: container.Command,
-                created: container.Created,
-                mounts: container.Mounts,
-                networkSettings: container.NetworkSettings,
-                hostConfig: {
-                    networkMode: container.HostConfig.NetworkMode,
-                },
-                id: container.Id,
-                image: container.Image,
-                status: container.Status,
-            },
-            { deep: true }
-        );
-        return containerData;
-    });
-
-    // Get all of the current containers
-    const installed = containers.length;
-    const running = containers.filter((container) => container.state === ContainerState.RUNNING).length;
-
-    store.dispatch(updateDockerState({ containers, installed, running }));
-    return containers;
-};

@@ -1,39 +0,0 @@
-import camelCaseKeys from 'camelcase-keys';
-
-import { type CoreContext, type CoreResult } from '@app/core/types/index.js';
-import { docker } from '@app/core/utils/index.js';
-import { catchHandlers } from '@app/core/utils/misc/catch-handlers.js';
-import { ensurePermission } from '@app/core/utils/permissions/ensure-permission.js';
-
-export const getDockerNetworks = async (context: CoreContext): Promise<CoreResult> => {
-    const { user } = context;
-
-    // Check permissions
-    ensurePermission(user, {
-        resource: 'docker/network',
-        action: 'read',
-        possession: 'any',
-    });
-
-    const networks = await docker
-        .listNetworks()
-        // If docker throws an error return no networks
-        .catch(catchHandlers.docker)
-        .then((networks = []) =>
-            networks.map((object) =>
-                camelCaseKeys(object as unknown as Record<string, unknown>, { deep: true })
-            )
-        );
-
-    /**
-     * Get all Docker networks
-     *
-     * @memberof Core
-     * @module docker/get-networks
-     * @param {Core~Context} context
-     * @returns {Core~Result} All the in/active Docker networks on the system.
-     */
-    return {
-        json: networks,
-    };
-};

@@ -1,4 +0,0 @@
-// Created from 'create-ts-index'
-
-export * from './get-docker-containers.js';
-export * from './get-docker-networks.js';

@@ -1,92 +0,0 @@
-import type { Systeminformation } from 'systeminformation';
-import { execa } from 'execa';
-import { blockDevices, diskLayout } from 'systeminformation';
-
-import type { Disk } from '@app/graphql/generated/api/types.js';
-import { graphqlLogger } from '@app/core/log.js';
-import { DiskFsType, DiskInterfaceType, DiskSmartStatus } from '@app/graphql/generated/api/types.js';
-import { batchProcess } from '@app/utils.js';
-
-const getTemperature = async (disk: Systeminformation.DiskLayoutData): Promise<number> => {
-    try {
-        const stdout = await execa('smartctl', ['-A', disk.device])
-            .then(({ stdout }) => stdout)
-            .catch(() => '');
-        const lines = stdout.split('\n');
-        const header = lines.find((line) => line.startsWith('ID#')) ?? '';
-        const fields = lines.splice(lines.indexOf(header) + 1, lines.length);
-        const field = fields.find(
-            (line) => line.includes('Temperature_Celsius') || line.includes('Airflow_Temperature_Cel')
-        );
-
-        if (!field) {
-            return -1;
-        }
-
-        if (field.includes('Min/Max')) {
-            return Number.parseInt(field.split(' - ')[1].trim().split(' ')[0], 10);
-        }
-
-        const line = field.split(' ');
-        return Number.parseInt(line[line.length - 1], 10);
-    } catch (error) {
-        graphqlLogger.warn('Caught error fetching disk temperature: %o', error);
-        return -1;
-    }
-};
-
-const parseDisk = async (
-    disk: Systeminformation.DiskLayoutData,
-    partitionsToParse: Systeminformation.BlockDevicesData[],
-    temperature = false
-): Promise<Disk> => {
-    const partitions = partitionsToParse
-        // Only get partitions from this disk
-        .filter((partition) => partition.name.startsWith(disk.device.split('/dev/')[1]))
-        // Remove unneeded fields
-        .map(({ name, fsType, size }) => ({
-            name,
-            fsType: typeof fsType === 'string' ? DiskFsType[fsType] : undefined,
-            size,
-        }));
-
-    return {
-        ...disk,
-        smartStatus:
-            typeof disk.smartStatus === 'string'
-                ? DiskSmartStatus[disk.smartStatus.toUpperCase()]
-                : undefined,
-        interfaceType:
-            typeof disk.interfaceType === 'string'
-                ? DiskInterfaceType[disk.interfaceType]
-                : DiskInterfaceType.UNKNOWN,
-        temperature: temperature ? await getTemperature(disk) : -1,
-        partitions,
-        id: disk.serialNum,
-    };
-};
-
-/**
- * Get all disks.
- */
-export const getDisks = async (options?: { temperature: boolean }): Promise<Disk[]> => {
-    // Return all fields but temperature
-    if (options?.temperature === false) {
-        const partitions = await blockDevices().then((devices) =>
-            devices.filter((device) => device.type === 'part')
-        );
-        const diskLayoutData = await diskLayout();
-        const disks = await Promise.all(diskLayoutData.map((disk) => parseDisk(disk, partitions)));
-
-        return disks;
-    }
-
-    const partitions = await blockDevices().then((devices) =>
-        devices.filter((device) => device.type === 'part')
-    );
-
-    const { data } = await batchProcess(await diskLayout(), async (disk) =>
-        parseDisk(disk, partitions, true)
-    );
-    return data;
-};

@@ -1,62 +0,0 @@
-import { promises as fs } from 'fs';
-
-import Table from 'cli-table';
-
-import { FileMissingError } from '@app/core/errors/file-missing-error.js';
-import { type CoreContext, type CoreResult } from '@app/core/types/index.js';
-import { ensurePermission } from '@app/core/utils/permissions/ensure-permission.js';
-import { getters } from '@app/store/index.js';
-
-/**
- * Get parity history.
- * @returns All parity checks with their respective date, duration, speed, status and errors.
- */
-export const getParityHistory = async (context: CoreContext): Promise<CoreResult> => {
-    const { user } = context;
-
-    // Bail if the user doesn't have permission
-    ensurePermission(user, {
-        resource: 'parity-history',
-        action: 'read',
-        possession: 'any',
-    });
-
-    const historyFilePath = getters.paths()['parity-checks'];
-    const history = await fs.readFile(historyFilePath).catch(() => {
-        throw new FileMissingError(historyFilePath);
-    });
-
-    // Convert checks into array of objects
-    const lines = history.toString().trim().split('\n').reverse();
-    const parityChecks = lines.map((line) => {
-        const [date, duration, speed, status, errors = '0'] = line.split('|');
-        return {
-            date,
-            duration: Number.parseInt(duration, 10),
-            speed,
-            status,
-            errors: Number.parseInt(errors, 10),
-        };
-    });
-
-    // Create table for text output
-    const table = new Table({
-        head: ['Date', 'Duration', 'Speed', 'Status', 'Errors'],
-    });
-    // Update raw values with strings
-    parityChecks.forEach((check) => {
-        const array = Object.values({
-            date: check.date,
-            speed: check.speed ? check.speed : 'Unavailable',
-            duration: check.duration >= 0 ? check.duration.toString() : 'Unavailable',
-            status: check.status === '-4' ? 'Cancelled' : 'OK',
-            errors: check.errors.toString(),
-        });
-        table.push(array);
-    });
-
-    return {
-        text: table.toString(),
-        json: parityChecks,
-    };
-};

@@ -1,20 +1,15 @@
// Created from 'create-ts-index'

-export * from './array/index.js';
export * from './debug/index.js';
export * from './disks/index.js';
-export * from './docker/index.js';
export * from './services/index.js';
export * from './settings/index.js';
export * from './shares/index.js';
export * from './users/index.js';
-export * from './vms/index.js';
export * from './add-share.js';
export * from './add-user.js';
export * from './get-apps.js';
export * from './get-devices.js';
-export * from './get-disks.js';
-export * from './get-parity-history.js';
export * from './get-services.js';
export * from './get-users.js';
export * from './get-welcome.js';

@@ -1,63 +0,0 @@
-import { GraphQLError } from 'graphql';
-
-import type { VmDomain } from '@app/graphql/generated/api/types.js';
-import { VmState } from '@app/graphql/generated/api/types.js';
-
-const states = {
-    0: 'NOSTATE',
-    1: 'RUNNING',
-    2: 'IDLE',
-    3: 'PAUSED',
-    4: 'SHUTDOWN',
-    5: 'SHUTOFF',
-    6: 'CRASHED',
-    7: 'PMSUSPENDED',
-};
-
-/**
- * Get vm domains.
- */
-export const getDomains = async () => {
-    try {
-        const { ConnectListAllDomainsFlags } = await import('@unraid/libvirt');
-        const { UnraidHypervisor } = await import('@app/core/utils/vms/get-hypervisor.js');
-
-        const hypervisor = await UnraidHypervisor.getInstance().getHypervisor();
-        if (!hypervisor) {
-            throw new GraphQLError('VMs Disabled');
-        }
-
-        const autoStartDomains = await hypervisor.connectListAllDomains(
-            ConnectListAllDomainsFlags.AUTOSTART
-        );
-
-        const autoStartDomainNames = await Promise.all(
-            autoStartDomains.map(async (domain) => hypervisor.domainGetName(domain))
-        );
-
-        // Get all domains
-        const domains = await hypervisor.connectListAllDomains();
-
-        const resolvedDomains: Array<VmDomain> = await Promise.all(
-            domains.map(async (domain) => {
-                const info = await hypervisor.domainGetInfo(domain);
-                const name = await hypervisor.domainGetName(domain);
-                const features = {};
-                return {
-                    name,
-                    uuid: await hypervisor.domainGetUUIDString(domain),
-                    state: VmState[states[info.state]] ?? VmState.NOSTATE,
-                    autoStart: autoStartDomainNames.includes(name),
-                    features,
-                };
-            })
-        );
-
-        return resolvedDomains;
-    } catch (error: unknown) {
-        // If we hit an error expect libvirt to be offline
-        throw new GraphQLError(
-            `Failed to fetch domains with error: ${error instanceof Error ? error.message : 'Unknown Error'}`
-        );
-    }
-};

@@ -1,2 +0,0 @@
-// Created from 'create-ts-index'
-export * from './get-domains.js';

@@ -19,6 +19,7 @@ export enum PUBSUB_CHANNEL {
    VMS = 'VMS',
    REGISTRATION = 'REGISTRATION',
    LOG_FILE = 'LOG_FILE',
+    PARITY = 'PARITY',
}

export const pubsub = new PubSub({ eventEmitter });

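`PARITY` is the new channel here; the surrounding members are unchanged context. A hedged sketch of how a publisher would use it, based on the `graphql-subscriptions` `PubSub` instance shown above — the import path and the payload shape are assumptions:

```ts
import { PUBSUB_CHANNEL, pubsub } from '@app/core/pubsub.js'; // assumed module path

// Push a parity-check update to any active GraphQL subscriptions on this channel
await pubsub.publish(PUBSUB_CHANNEL.PARITY, {
    parityHistory: { progress: 42, running: true }, // hypothetical payload shape
});
```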
@@ -1,10 +1,10 @@
-import type {
-    ArrayState,
-    DiskFsType,
+import { ArrayState } from '@app/unraid-api/graph/resolvers/array/array.model.js';
+import { DiskFsType } from '@app/unraid-api/graph/resolvers/disks/disks.model.js';
+import {
    RegistrationState,
-    registrationType,
-} from '@app/graphql/generated/api/types.js';
-import { ConfigErrorState } from '@app/graphql/generated/api/types.js';
+    RegistrationType,
+} from '@app/unraid-api/graph/resolvers/registration/registration.model.js';
+import { ConfigErrorState } from '@app/unraid-api/graph/resolvers/vars/vars.model.js';

/**
 * Global vars

@@ -128,7 +128,7 @@ export type Var = {
    /** Who the current Unraid key is registered to. */
    regTo: string;
    /** Which type of key this is. */
-    regTy: registrationType;
+    regTy: RegistrationType;
    /** Is the server currently in safe mode. */
    safeMode: boolean;
    sbClean: boolean;

@@ -1,10 +0,0 @@
-import { ArrayState } from '@app/graphql/generated/api/types.js';
-import { getters } from '@app/store/index.js';
-
-/**
- * Is the array running?
- */
-export const arrayIsRunning = () => {
-    const emhttp = getters.emhttp();
-    return emhttp.var.mdState === ArrayState.STARTED;
-};

@@ -1,3 +0,0 @@
-// Created from 'create-ts-index'
-
-export * from './array-is-running.js';

@@ -1,6 +1,5 @@
// Created from 'create-ts-index'

-export * from './array/index.js';
export * from './clients/index.js';
export * from './plugins/index.js';
export * from './shares/index.js';

@@ -1,6 +1,6 @@
import type { DiskShare, Share, UserShare } from '@app/core/types/states/share.js';
-import type { ArrayDisk } from '@app/graphql/generated/api/types.js';
import { getters } from '@app/store/index.js';
+import { ArrayDisk } from '@app/unraid-api/graph/resolvers/array/array.model.js';

const processors = {
    user(share: Share) {

@@ -1,57 +0,0 @@
-import { constants } from 'fs';
-import { access } from 'fs/promises';
-
-import { type Hypervisor as HypervisorType } from '@unraid/libvirt';
-
-import { libvirtLogger } from '@app/core/log.js';
-
-const uri = process.env.LIBVIRT_URI ?? 'qemu:///system';
-
-const libvirtPid = '/var/run/libvirt/libvirtd.pid';
-
-const isLibvirtRunning = async (): Promise<boolean> => {
-    try {
-        await access(libvirtPid, constants.F_OK | constants.R_OK);
-        return true;
-    } catch (error) {
-        return false;
-    }
-};
-
-export class UnraidHypervisor {
-    private static instance: UnraidHypervisor | null = null;
-    private hypervisor: HypervisorType | null = null;
-    private constructor() {}
-
-    public static getInstance(): UnraidHypervisor {
-        if (this.instance === null) {
-            this.instance = new UnraidHypervisor();
-        }
-        return this.instance;
-    }
-
-    public async getHypervisor(): Promise<HypervisorType | null> {
-        // Return hypervisor if it's already connected
-        const running = await isLibvirtRunning();
-
-        if (this.hypervisor && running) {
-            return this.hypervisor;
-        }
-
-        if (!running) {
-            this.hypervisor = null;
-            throw new Error('Libvirt is not running');
-        }
-        const { Hypervisor } = await import('@unraid/libvirt');
-        this.hypervisor = new Hypervisor({ uri });
-        await this.hypervisor.connectOpen().catch((error: unknown) => {
-            libvirtLogger.error(
-                `Failed starting VM hypervisor connection with "${(error as Error).message}"`
-            );
-
-            throw error;
-        });
-
-        return this.hypervisor;
-    }
-}

@@ -1,8 +1,4 @@
// Created from 'create-ts-index'

export * from './filter-devices.js';
-export * from './get-hypervisor.js';
export * from './get-pci-devices.js';
-export * from './parse-domain.js';
-export * from './parse-domains.js';
-export * from './system-network-interfaces.js';

@@ -1,60 +0,0 @@
-import { type Domain } from '@app/core/types/index.js';
-
-export type DomainLookupType = 'id' | 'uuid' | 'name';
-
-/**
- * Parse domain
- *
- * @param type What lookup type to use.
- * @param id The domain's ID, UUID or name.
- * @private
- */
-export const parseDomain = async (type: DomainLookupType, id: string): Promise<Domain> => {
-    const types = {
-        id: 'lookupDomainByIdAsync',
-        uuid: 'lookupDomainByUUIDAsync',
-        name: 'lookupDomainByNameAsync',
-    };
-
-    if (!type || !Object.keys(types).includes(type)) {
-        throw new Error(`Type must be one of [${Object.keys(types).join(', ')}], ${type} given.`);
-    }
-
-    const { UnraidHypervisor } = await import('@app/core/utils/vms/get-hypervisor.js');
-    const client = await UnraidHypervisor.getInstance().getHypervisor();
-    const method = types[type];
-    const domain = await client[method](id);
-    const info = await domain.getInfoAsync();
-
-    const [uuid, osType, autostart, maxMemory, schedulerType, schedulerParameters, securityLabel, name] =
-        await Promise.all([
-            domain.getUUIDAsync(),
-            domain.getOSTypeAsync(),
-            domain.getAutostartAsync(),
-            domain.getMaxMemoryAsync(),
-            domain.getSchedulerTypeAsync(),
-            domain.getSchedulerParametersAsync(),
-            domain.getSecurityLabelAsync(),
-            domain.getNameAsync(),
-        ]);
-
-    const results = {
-        uuid,
-        osType,
-        autostart,
-        maxMemory,
-        schedulerType,
-        schedulerParameters,
-        securityLabel,
-        name,
-        ...info,
-        state: info.state.replace(' ', '_'),
-    };
-
-    if (info.state === 'running') {
-        results.vcpus = await domain.getVcpusAsync();
-        results.memoryStats = await domain.getMemoryStatsAsync();
-    }
-
-    return results;
-};

@@ -1,9 +0,0 @@
-import type { DomainLookupType } from '@app/core/utils/vms/parse-domain.js';
-import { type Domain } from '@app/core/types/index.js';
-import { parseDomain } from '@app/core/utils/vms/parse-domain.js';
-
-/**
- * Parse domains.
- */
-export const parseDomains = async (type: DomainLookupType, domains: string[]): Promise<Domain[]> =>
-    Promise.all(domains.map(async (domain) => parseDomain(type, domain)));

@@ -1,3 +0,0 @@
-import { networkInterfaces } from 'systeminformation';
-
-export const systemNetworkInterfaces = networkInterfaces();

@@ -5,40 +5,49 @@ import { fileURLToPath } from 'node:url';

import type { PackageJson, SetRequired } from 'type-fest';

+import { fileExistsSync } from '@app/core/utils/files/file-exists.js';

/**
- * Tries to get the package.json at the given location.
- * @param location - The location of the package.json file, relative to the current file
- * @returns The package.json object or undefined if unable to read
+ * Returns the absolute path to the given file.
+ * @param location - The location of the file, relative to the current file
+ * @returns The absolute path to the file
 */
-function readPackageJson(location: string): PackageJson | undefined {
+function getAbsolutePath(location: string): string {
    try {
-        let packageJsonPath: string;
-        try {
-            const packageJsonUrl = import.meta.resolve(location);
-            packageJsonPath = fileURLToPath(packageJsonUrl);
-        } catch {
-            // Fallback (e.g. for local development): resolve the path relative to this module
-            packageJsonPath = fileURLToPath(new URL(location, import.meta.url));
-        }
-        const packageJsonRaw = readFileSync(packageJsonPath, 'utf-8');
-        return JSON.parse(packageJsonRaw) as PackageJson;
+        const fileUrl = import.meta.resolve(location);
+        return fileURLToPath(fileUrl);
    } catch {
-        return undefined;
+        return fileURLToPath(new URL(location, import.meta.url));
    }
}
+/**
+ * Returns the path to the api's package.json file. Throws if unable to find.
+ * @param possiblePaths - The possible locations of the package.json file, relative to the current file
+ * @returns The absolute path to the package.json file
+ */
+export function getPackageJsonPath(possiblePaths = ['../package.json', '../../package.json']): string {
+    for (const location of possiblePaths) {
+        const packageJsonPath = getAbsolutePath(location);
+        if (fileExistsSync(packageJsonPath)) {
+            return packageJsonPath;
+        }
+    }
+    throw new Error(
+        `Could not find package.json in any of the expected locations: ${possiblePaths.join(', ')}`
+    );
+}

/**
- * Retrieves the Unraid API package.json. Throws if unable to find.
+ * Retrieves the Unraid API package.json. Throws if unable to find or parse.
 * This should be considered a fatal error.
 *
+ * @param pathOverride - The path to the package.json file. If not provided, the default path will be found & used.
 * @returns The package.json object
 */
-export const getPackageJson = () => {
-    const packageJson = readPackageJson('../package.json') || readPackageJson('../../package.json');
-    if (!packageJson) {
-        throw new Error('Could not find package.json in any of the expected locations');
-    }
-    return packageJson as SetRequired<PackageJson, 'version' | 'dependencies'>;
+export const getPackageJson = (pathOverride?: string) => {
+    const packageJsonPath = pathOverride ?? getPackageJsonPath();
+    const packageJsonRaw = readFileSync(packageJsonPath, 'utf-8');
+    return JSON.parse(packageJsonRaw) as SetRequired<PackageJson, 'version' | 'dependencies'>;
};

/**

@@ -86,3 +95,4 @@ export const MOTHERSHIP_GRAPHQL_LINK = process.env.MOTHERSHIP_GRAPHQL_LINK
    : 'https://mothership.unraid.net/ws';

export const PM2_HOME = process.env.PM2_HOME ?? join(homedir(), '.pm2');
+export const PATHS_CONFIG_MODULES = process.env.PATHS_CONFIG_MODULES!;

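The refactor splits path resolution (`getPackageJsonPath`, which walks an explicit list of candidate locations) from parsing (`getPackageJson`, which now throws on a read or parse failure instead of returning `undefined`). A small sketch of the resulting call sites — the module path is assumed, and `pathOverride` is the new escape hatch for tests:

```ts
import { getPackageJson, getPackageJsonPath } from '@app/environment.js'; // assumed module path

// Normal startup path: resolve, read, and parse in one call; any failure throws
const { version, dependencies } = getPackageJson();
console.log(`unraid-api v${version}, ${Object.keys(dependencies).length} dependencies`);

// Tests can bypass resolution entirely with the override
const fixture = getPackageJson('./__fixtures__/package.json'); // hypothetical fixture path
console.log(getPackageJsonPath()); // resolves the same default locations shown in the diff
```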
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -14,14 +14,14 @@ import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-
 * Learn more about it here: https://the-guild.dev/graphql/codegen/plugins/presets/preset-client#reducing-bundle-size
 */
type Documents = {
-    "\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": typeof types.sendRemoteGraphQLResponseDocument,
-    "\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": typeof types.RemoteGraphQLEventFragmentFragmentDoc,
-    "\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": typeof types.eventsDocument,
+    "\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": typeof types.SendRemoteGraphQlResponseDocument,
+    "\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": typeof types.RemoteGraphQlEventFragmentFragmentDoc,
+    "\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": typeof types.EventsDocument,
};
const documents: Documents = {
-    "\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": types.sendRemoteGraphQLResponseDocument,
-    "\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": types.RemoteGraphQLEventFragmentFragmentDoc,
-    "\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": types.eventsDocument,
+    "\n mutation sendRemoteGraphQLResponse($input: RemoteGraphQLServerInput!) {\n remoteGraphQLResponse(input: $input)\n }\n": types.SendRemoteGraphQlResponseDocument,
+    "\n fragment RemoteGraphQLEventFragment on RemoteGraphQLEvent {\n remoteGraphQLEventData: data {\n type\n body\n sha256\n }\n }\n": types.RemoteGraphQlEventFragmentFragmentDoc,
+    "\n subscription events {\n events {\n __typename\n ... on ClientConnectedEvent {\n connectedData: data {\n type\n version\n apiKey\n }\n connectedEvent: type\n }\n ... on ClientDisconnectedEvent {\n disconnectedData: data {\n type\n version\n apiKey\n }\n disconnectedEvent: type\n }\n ...RemoteGraphQLEventFragment\n }\n }\n": types.EventsDocument,
};

/**

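These renames (`sendRemoteGraphQLResponseDocument` → `SendRemoteGraphQlResponseDocument`, `eventsDocument` → `EventsDocument`, and so on) match what GraphQL Code Generator emits when its naming convention is set to pascal case. A hedged sketch of the codegen setting that produces this output — the repo's actual config file is not part of this diff, so paths and schema source here are assumptions:

```ts
// codegen.ts (illustrative; not the repo's real config)
import type { CodegenConfig } from '@graphql-codegen/cli';

const config: CodegenConfig = {
    schema: 'http://localhost:3001/graphql', // assumed schema source
    documents: ['src/**/*.ts'],
    generates: {
        'src/graphql/generated/client/': {
            preset: 'client',
            config: {
                // pascalCase turns `eventsDocument` into `EventsDocument`, etc.
                namingConvention: 'change-case-all#pascalCase',
            },
        },
    },
};

export default config;
```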
@@ -35,14 +35,14 @@ export type AccessUrl = {
  ipv4?: Maybe<Scalars['URL']['output']>;
  ipv6?: Maybe<Scalars['URL']['output']>;
  name?: Maybe<Scalars['String']['output']>;
-  type: URL_TYPE;
+  type: UrlType;
};

export type AccessUrlInput = {
  ipv4?: InputMaybe<Scalars['URL']['input']>;
  ipv6?: InputMaybe<Scalars['URL']['input']>;
  name?: InputMaybe<Scalars['String']['input']>;
-  type: URL_TYPE;
+  type: UrlType;
};

export type ArrayCapacity = {

@@ -305,7 +305,7 @@ export type DashboardVmsInput = {
  started: Scalars['Int']['input'];
};

-export type Event = ClientConnectedEvent | ClientDisconnectedEvent | ClientPingEvent | RemoteAccessEvent | RemoteGraphQLEvent | UpdateEvent;
+export type Event = ClientConnectedEvent | ClientDisconnectedEvent | ClientPingEvent | RemoteAccessEvent | RemoteGraphQlEvent | UpdateEvent;

export enum EventType {
  CLIENT_CONNECTED_EVENT = 'CLIENT_CONNECTED_EVENT',

@@ -373,32 +373,32 @@ export type Mutation = {
};


-export type MutationremoteGraphQLResponseArgs = {
-  input: RemoteGraphQLServerInput;
+export type MutationRemoteGraphQlResponseArgs = {
+  input: RemoteGraphQlServerInput;
};


-export type MutationremoteMutationArgs = {
-  input: RemoteGraphQLClientInput;
+export type MutationRemoteMutationArgs = {
+  input: RemoteGraphQlClientInput;
};


-export type MutationremoteSessionArgs = {
+export type MutationRemoteSessionArgs = {
  remoteAccess: RemoteAccessInput;
};


-export type MutationsendNotificationArgs = {
+export type MutationSendNotificationArgs = {
  notification: NotificationInput;
};


-export type MutationupdateDashboardArgs = {
+export type MutationUpdateDashboardArgs = {
  data: DashboardInput;
};


-export type MutationupdateNetworkArgs = {
+export type MutationUpdateNetworkArgs = {
  data: NetworkInput;
};


@@ -474,17 +474,17 @@ export type Query = {
};


-export type QuerydashboardArgs = {
+export type QueryDashboardArgs = {
  id: Scalars['String']['input'];
};


-export type QueryremoteQueryArgs = {
-  input: RemoteGraphQLClientInput;
+export type QueryRemoteQueryArgs = {
+  input: RemoteGraphQlClientInput;
};


-export type QueryserverStatusArgs = {
+export type QueryServerStatusArgs = {
  apiKey: Scalars['String']['input'];
};


@@ -557,7 +557,7 @@ export type RemoteAccessInput = {
  url?: InputMaybe<AccessUrlInput>;
};

-export type RemoteGraphQLClientInput = {
+export type RemoteGraphQlClientInput = {
  apiKey: Scalars['String']['input'];
  body: Scalars['String']['input'];
  /** Time in milliseconds to wait for a response from the remote server (defaults to 15000) */

@@ -566,34 +566,34 @@ export type RemoteGraphQLClientInput = {
  ttl?: InputMaybe<Scalars['Int']['input']>;
};

-export type RemoteGraphQLEvent = {
+export type RemoteGraphQlEvent = {
  __typename?: 'RemoteGraphQLEvent';
-  data: RemoteGraphQLEventData;
+  data: RemoteGraphQlEventData;
  type: EventType;
};

-export type RemoteGraphQLEventData = {
+export type RemoteGraphQlEventData = {
  __typename?: 'RemoteGraphQLEventData';
  /** Contains mutation / subscription / query data in the form of body: JSON, variables: JSON */
  body: Scalars['String']['output'];
  /** sha256 hash of the body */
  sha256: Scalars['String']['output'];
-  type: RemoteGraphQLEventType;
+  type: RemoteGraphQlEventType;
};

-export enum RemoteGraphQLEventType {
+export enum RemoteGraphQlEventType {
  REMOTE_MUTATION_EVENT = 'REMOTE_MUTATION_EVENT',
  REMOTE_QUERY_EVENT = 'REMOTE_QUERY_EVENT',
  REMOTE_SUBSCRIPTION_EVENT = 'REMOTE_SUBSCRIPTION_EVENT',
  REMOTE_SUBSCRIPTION_EVENT_PING = 'REMOTE_SUBSCRIPTION_EVENT_PING'
}

-export type RemoteGraphQLServerInput = {
+export type RemoteGraphQlServerInput = {
  /** Body - contains an object containing data: (GQL response data) or errors: (GQL Errors) */
  body: Scalars['String']['input'];
  /** sha256 hash of the body */
  sha256: Scalars['String']['input'];
-  type: RemoteGraphQLEventType;
+  type: RemoteGraphQlEventType;
};

export type Server = {

@@ -654,8 +654,8 @@ export type Subscription = {
};


-export type SubscriptionremoteSubscriptionArgs = {
-  input: RemoteGraphQLClientInput;
+export type SubscriptionRemoteSubscriptionArgs = {
+  input: RemoteGraphQlClientInput;
};

export type TwoFactorLocal = {

@@ -681,7 +681,7 @@ export type TwoFactorWithoutToken = {
  remote?: Maybe<TwoFactorRemote>;
};

-export enum URL_TYPE {
+export enum UrlType {
  DEFAULT = 'DEFAULT',
  LAN = 'LAN',
  MDNS = 'MDNS',

@@ -726,23 +726,23 @@ export type Vars = {
|
||||
regTy?: Maybe<Scalars['String']['output']>;
|
||||
};
|
||||
|
||||
export type sendRemoteGraphQLResponseMutationVariables = Exact<{
|
||||
input: RemoteGraphQLServerInput;
|
||||
export type SendRemoteGraphQlResponseMutationVariables = Exact<{
|
||||
input: RemoteGraphQlServerInput;
|
||||
}>;
|
||||
|
||||
|
||||
export type sendRemoteGraphQLResponseMutation = { __typename?: 'Mutation', remoteGraphQLResponse: boolean };
|
||||
export type SendRemoteGraphQlResponseMutation = { __typename?: 'Mutation', remoteGraphQLResponse: boolean };
|
||||
|
||||
export type RemoteGraphQLEventFragmentFragment = { __typename?: 'RemoteGraphQLEvent', remoteGraphQLEventData: { __typename?: 'RemoteGraphQLEventData', type: RemoteGraphQLEventType, body: string, sha256: string } } & { ' $fragmentName'?: 'RemoteGraphQLEventFragmentFragment' };
|
||||
export type RemoteGraphQlEventFragmentFragment = { __typename?: 'RemoteGraphQLEvent', remoteGraphQLEventData: { __typename?: 'RemoteGraphQLEventData', type: RemoteGraphQlEventType, body: string, sha256: string } } & { ' $fragmentName'?: 'RemoteGraphQlEventFragmentFragment' };
|
||||
|
||||
export type eventsSubscriptionVariables = Exact<{ [key: string]: never; }>;
|
||||
export type EventsSubscriptionVariables = Exact<{ [key: string]: never; }>;
|
||||
|
||||
|
||||
export type eventsSubscription = { __typename?: 'Subscription', events?: Array<{ __typename: 'ClientConnectedEvent', connectedEvent: EventType, connectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientDisconnectedEvent', disconnectedEvent: EventType, disconnectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientPingEvent' } | { __typename: 'RemoteAccessEvent' } | (
export type EventsSubscription = { __typename?: 'Subscription', events?: Array<{ __typename: 'ClientConnectedEvent', connectedEvent: EventType, connectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientDisconnectedEvent', disconnectedEvent: EventType, disconnectedData: { __typename?: 'ClientConnectionEventData', type: ClientType, version: string, apiKey: string } } | { __typename: 'ClientPingEvent' } | { __typename: 'RemoteAccessEvent' } | (
  { __typename: 'RemoteGraphQLEvent' }
  & { ' $fragmentRefs'?: { 'RemoteGraphQLEventFragmentFragment': RemoteGraphQLEventFragmentFragment } }
  & { ' $fragmentRefs'?: { 'RemoteGraphQlEventFragmentFragment': RemoteGraphQlEventFragmentFragment } }
) | { __typename: 'UpdateEvent' }> | null };

export const RemoteGraphQLEventFragmentFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"remoteGraphQLEventData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"body"}},{"kind":"Field","name":{"kind":"Name","value":"sha256"}}]}}]}}]} as unknown as DocumentNode<RemoteGraphQLEventFragmentFragment, unknown>;
export const sendRemoteGraphQLResponseDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"sendRemoteGraphQLResponse"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLServerInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"remoteGraphQLResponse"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}]}]}}]} as unknown as DocumentNode<sendRemoteGraphQLResponseMutation, sendRemoteGraphQLResponseMutationVariables>;
export const eventsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"subscription","name":{"kind":"Name","value":"events"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"events"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ClientConnectedEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"connectedData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"apiKey"}}]}},{"kind":"Field","alias":{"kind":"Name","value":"connectedEvent"},"name":{"kind":"Name","value":"type"}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ClientDisconnectedEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"disconnectedData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"apiKey"}}]}},{"kind":"Field","alias":{"kind":"Name","value":"disconnectedEvent"},"name":{"kind":"Name","value":"type"}}]}},{"kind":"FragmentSpread","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"remoteGraphQLEventData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"body"}},{"kind":"Field","name":{"kind":"Name","value":"sha256"}}]}}]}}]} as unknown as DocumentNode<eventsSubscription, eventsSubscriptionVariables>;
export const RemoteGraphQlEventFragmentFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"remoteGraphQLEventData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"body"}},{"kind":"Field","name":{"kind":"Name","value":"sha256"}}]}}]}}]} as unknown as DocumentNode<RemoteGraphQlEventFragmentFragment, unknown>;
export const SendRemoteGraphQlResponseDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"sendRemoteGraphQLResponse"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLServerInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"remoteGraphQLResponse"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}]}]}}]} as unknown as DocumentNode<SendRemoteGraphQlResponseMutation, SendRemoteGraphQlResponseMutationVariables>;
export const EventsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"subscription","name":{"kind":"Name","value":"events"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"events"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ClientConnectedEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"connectedData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"apiKey"}}]}},{"kind":"Field","alias":{"kind":"Name","value":"connectedEvent"},"name":{"kind":"Name","value":"type"}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ClientDisconnectedEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"disconnectedData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"version"}},{"kind":"Field","name":{"kind":"Name","value":"apiKey"}}]}},{"kind":"Field","alias":{"kind":"Name","value":"disconnectedEvent"},"name":{"kind":"Name","value":"type"}}]}},{"kind":"FragmentSpread","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RemoteGraphQLEventFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RemoteGraphQLEvent"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","alias":{"kind":"Name","value":"remoteGraphQLEventData"},"name":{"kind":"Name","value":"data"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"body"}},{"kind":"Field","name":{"kind":"Name","value":"sha256"}}]}}]}}]} as unknown as DocumentNode<EventsSubscription, EventsSubscriptionVariables>;
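
For orientation, here is a minimal usage sketch (not part of the diff) of the renamed typed document nodes above, assuming an already-configured Apollo client instance; the sendResponse helper is hypothetical:

import type { ApolloClient, NormalizedCacheObject } from '@apollo/client/core';

import {
    RemoteGraphQlEventType,
    SendRemoteGraphQlResponseDocument,
} from '@app/graphql/generated/client/graphql.js';

// Hypothetical helper: send a pre-hashed response body back upstream.
// The typed document node type-checks both `variables` and the result.
export const sendResponse = async (
    client: ApolloClient<NormalizedCacheObject>,
    sha256: string,
    body: string
): Promise<boolean> => {
    const result = await client.mutate({
        mutation: SendRemoteGraphQlResponseDocument,
        variables: {
            input: { sha256, body, type: RemoteGraphQlEventType.REMOTE_QUERY_EVENT },
        },
    });
    // `remoteGraphQLResponse` resolves to a boolean per SendRemoteGraphQlResponseMutation.
    return result.data?.remoteGraphQLResponse ?? false;
};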

@@ -1,6 +1,6 @@
/* eslint-disable */
import { z } from 'zod'
import { AccessUrlInput, ArrayCapacityBytesInput, ArrayCapacityInput, ClientType, ConfigErrorState, DashboardAppsInput, DashboardArrayInput, DashboardCaseInput, DashboardConfigInput, DashboardDisplayInput, DashboardInput, DashboardOsInput, DashboardServiceInput, DashboardServiceUptimeInput, DashboardTwoFactorInput, DashboardTwoFactorLocalInput, DashboardTwoFactorRemoteInput, DashboardVarsInput, DashboardVersionsInput, DashboardVmsInput, EventType, Importance, NetworkInput, NotificationInput, NotificationStatus, PingEventSource, RegistrationState, RemoteAccessEventActionType, RemoteAccessInput, RemoteGraphQLClientInput, RemoteGraphQLEventType, RemoteGraphQLServerInput, ServerStatus, URL_TYPE, UpdateType } from '@app/graphql/generated/client/graphql.js'
import { AccessUrlInput, ArrayCapacityBytesInput, ArrayCapacityInput, ClientType, ConfigErrorState, DashboardAppsInput, DashboardArrayInput, DashboardCaseInput, DashboardConfigInput, DashboardDisplayInput, DashboardInput, DashboardOsInput, DashboardServiceInput, DashboardServiceUptimeInput, DashboardTwoFactorInput, DashboardTwoFactorLocalInput, DashboardTwoFactorRemoteInput, DashboardVarsInput, DashboardVersionsInput, DashboardVmsInput, EventType, Importance, NetworkInput, NotificationInput, NotificationStatus, PingEventSource, RegistrationState, RemoteAccessEventActionType, RemoteAccessInput, RemoteGraphQlClientInput, RemoteGraphQlEventType, RemoteGraphQlServerInput, ServerStatus, UrlType, UpdateType } from '@app/graphql/generated/client/graphql.js'

type Properties<T> = Required<{
  [K in keyof T]: z.ZodType<T[K], any, T[K]>;
@@ -28,11 +28,11 @@ export const RegistrationStateSchema = z.nativeEnum(RegistrationState);

export const RemoteAccessEventActionTypeSchema = z.nativeEnum(RemoteAccessEventActionType);

export const RemoteGraphQLEventTypeSchema = z.nativeEnum(RemoteGraphQLEventType);
export const RemoteGraphQlEventTypeSchema = z.nativeEnum(RemoteGraphQlEventType);

export const ServerStatusSchema = z.nativeEnum(ServerStatus);

export const URL_TYPESchema = z.nativeEnum(URL_TYPE);
export const UrlTypeSchema = z.nativeEnum(UrlType);

export const UpdateTypeSchema = z.nativeEnum(UpdateType);

@@ -41,7 +41,7 @@ export function AccessUrlInputSchema(): z.ZodObject<Properties<AccessUrlInput>>
    ipv4: z.instanceof(URL).nullish(),
    ipv6: z.instanceof(URL).nullish(),
    name: z.string().nullish(),
    type: URL_TYPESchema
    type: UrlTypeSchema
  })
}

@@ -198,7 +198,7 @@ export function RemoteAccessInputSchema(): z.ZodObject<Properties<RemoteAccessIn
  })
}

export function RemoteGraphQLClientInputSchema(): z.ZodObject<Properties<RemoteGraphQLClientInput>> {
export function RemoteGraphQlClientInputSchema(): z.ZodObject<Properties<RemoteGraphQlClientInput>> {
  return z.object({
    apiKey: z.string(),
    body: z.string(),
@@ -207,10 +207,10 @@ export function RemoteGraphQLClientInputSchema(): z.ZodObject<Properties<RemoteG
  })
}

export function RemoteGraphQLServerInputSchema(): z.ZodObject<Properties<RemoteGraphQLServerInput>> {
export function RemoteGraphQlServerInputSchema(): z.ZodObject<Properties<RemoteGraphQlServerInput>> {
  return z.object({
    body: z.string(),
    sha256: z.string(),
    type: RemoteGraphQLEventTypeSchema
    type: RemoteGraphQlEventTypeSchema
  })
}
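
A quick sketch of how these generated validators are used (the candidate values are made up for illustration); AccessUrlInputSchema accepts nullish ipv4/ipv6 URL instances and requires a UrlType member:

import { UrlType } from '@app/graphql/generated/client/graphql.js';
import { AccessUrlInputSchema } from '@app/graphql/generated/client/validators.js';

// Hypothetical candidate built from a discovered interface address.
const candidate = {
    ipv4: new URL('http://192.168.1.10'),
    ipv6: null,
    name: 'tower.local',
    type: UrlType.LAN,
};

const parsed = AccessUrlInputSchema().safeParse(candidate);
if (parsed.success) {
    console.log('valid access URL', parsed.data);
} else {
    console.error(parsed.error.issues);
}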

@@ -1,58 +0,0 @@
import { decodeJwt } from 'jose';

import type { ConnectSignInInput } from '@app/graphql/generated/api/types.js';
import { getters, store } from '@app/store/index.js';
import { loginUser } from '@app/store/modules/config.js';
import { FileLoadStatus } from '@app/store/types.js';
import { ApiKeyService } from '@app/unraid-api/auth/api-key.service.js';

export const connectSignIn = async (input: ConnectSignInInput): Promise<boolean> => {
    if (getters.emhttp().status === FileLoadStatus.LOADED) {
        const userInfo = input.idToken ? decodeJwt(input.idToken) : (input.userInfo ?? null);

        if (
            !userInfo ||
            !userInfo.preferred_username ||
            !userInfo.email ||
            typeof userInfo.preferred_username !== 'string' ||
            typeof userInfo.email !== 'string'
        ) {
            throw new Error('Missing User Attributes');
        }

        try {
            const { remote } = getters.config();
            const { localApiKey: localApiKeyFromConfig } = remote;

            let localApiKeyToUse = localApiKeyFromConfig;

            if (localApiKeyFromConfig == '') {
                const apiKeyService = new ApiKeyService();
                // Create local API key
                const localApiKey = await apiKeyService.createLocalConnectApiKey();

                if (!localApiKey?.key) {
                    throw new Error('Failed to create local API key');
                }

                localApiKeyToUse = localApiKey.key;
            }

            await store.dispatch(
                loginUser({
                    avatar: typeof userInfo.avatar === 'string' ? userInfo.avatar : '',
                    username: userInfo.preferred_username,
                    email: userInfo.email,
                    apikey: input.apiKey,
                    localApiKey: localApiKeyToUse,
                })
            );

            return true;
        } catch (error) {
            throw new Error(`Failed to login user: ${error}`);
        }
    } else {
        return false;
    }
};

@@ -1,5 +1,5 @@
import { logger } from '@app/core/log.js';
import { type ApiKeyResponse } from '@app/graphql/generated/api/types.js';
import { type ApiKeyResponse } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';

export const checkApi = async (): Promise<ApiKeyResponse> => {
    logger.trace('Cloud endpoint: Checking API');

@@ -1,15 +1,14 @@
import { got } from 'got';

import type { CloudResponse } from '@app/graphql/generated/api/types.js';
import { FIVE_DAYS_SECS, ONE_DAY_SECS } from '@app/consts.js';
import { logger } from '@app/core/log.js';
import { API_VERSION, MOTHERSHIP_GRAPHQL_LINK } from '@app/environment.js';
import { MinigraphStatus } from '@app/graphql/generated/api/types.js';
import { checkDNS } from '@app/graphql/resolvers/query/cloud/check-dns.js';
import { checkMothershipAuthentication } from '@app/graphql/resolvers/query/cloud/check-mothership-authentication.js';
import { getCloudCache, getDnsCache } from '@app/store/getters/index.js';
import { getters, store } from '@app/store/index.js';
import { setCloudCheck, setDNSCheck } from '@app/store/modules/cache.js';
import { CloudResponse, MinigraphStatus } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';

const mothershipBaseUrl = new URL(MOTHERSHIP_GRAPHQL_LINK).origin;

@@ -1,6 +1,6 @@
import { logger } from '@app/core/log.js';
import { type MinigraphqlResponse } from '@app/graphql/generated/api/types.js';
import { getters } from '@app/store/index.js';
import { MinigraphqlResponse } from '@app/unraid-api/graph/resolvers/cloud/cloud.model.js';

export const checkMinigraphql = (): MinigraphqlResponse => {
    logger.trace('Cloud endpoint: Checking mini-graphql');

@@ -19,20 +19,20 @@ import { sanitizeVendor } from '@app/core/utils/vms/domain/sanitize-vendor.js';
import { vmRegExps } from '@app/core/utils/vms/domain/vm-regexps.js';
import { filterDevices } from '@app/core/utils/vms/filter-devices.js';
import { getPciDevices } from '@app/core/utils/vms/get-pci-devices.js';
import {
    type Devices,
    type Display,
    type Gpu,
    type InfoApps,
    type InfoCpu,
    type InfoMemory,
    type Os as InfoOs,
    type MemoryLayout,
    type Temperature,
    type Theme,
    type Versions,
} from '@app/graphql/generated/api/types.js';
import { getters } from '@app/store/index.js';
import {
    Devices,
    Display,
    Gpu,
    InfoApps,
    InfoCpu,
    InfoMemory,
    Os as InfoOs,
    MemoryLayout,
    Temperature,
    Theme,
    Versions,
} from '@app/unraid-api/graph/resolvers/info/info.model.js';

export const generateApps = async (): Promise<InfoApps> => {
    const installed = await docker
@@ -43,13 +43,14 @@ export const generateApps = async (): Promise<InfoApps> => {
        .listContainers()
        .catch(() => [])
        .then((containers) => containers.length);
    return { installed, started };
    return { id: 'info/apps', installed, started };
};

export const generateOs = async (): Promise<InfoOs> => {
    const os = await osInfo();

    return {
        id: 'info/os',
        ...os,
        hostname: getters.emhttp().var.name,
        uptime: bootTimestamp.toISOString(),
@@ -63,6 +64,7 @@ export const generateCpu = async (): Promise<InfoCpu> => {
        .catch(() => []);

    return {
        id: 'info/cpu',
        ...rest,
        cores: physicalCores,
        threads: cores,
@@ -94,8 +96,8 @@ export const generateDisplay = async (): Promise<Display> => {
    }
    const { theme, unit, ...display } = state.display;
    return {
        ...display,
        id: 'dynamix-config/display',
        ...display,
        theme: theme as Theme,
        unit: unit as Temperature,
        scale: toBoolean(display.scale),
@@ -118,6 +120,7 @@ export const generateVersions = async (): Promise<Versions> => {
    const softwareVersions = await versions();

    return {
        id: 'info/versions',
        unraid,
        ...softwareVersions,
    };
@@ -165,6 +168,7 @@ export const generateMemory = async (): Promise<InfoMemory> => {
    }

    return {
        id: 'info/memory',
        layout,
        max,
        ...info,
@@ -410,10 +414,9 @@ export const generateDevices = async (): Promise<Devices> => {
    };

    return {
        id: 'info/devices',
        // Scsi: await scsiDevices,
        gpu: await systemGPUDevices,
        // Move this to interfaces
        // network: await si.networkInterfaces(),
        pci: await systemPciDevices(),
        usb: await getSystemUSBDevices(),
    };

@@ -1,11 +1,8 @@
import type { AccessUrlInput } from '@app/graphql/generated/client/graphql.js';
import type { RootState } from '@app/store/index.js';
import { logger } from '@app/core/log.js';
import { type Nginx } from '@app/core/types/states/nginx.js';
import { type AccessUrl } from '@app/graphql/generated/api/types.js';
import { URL_TYPE } from '@app/graphql/generated/client/graphql.js';
import { AccessUrlInputSchema } from '@app/graphql/generated/client/validators.js';
import { store } from '@app/store/index.js';
import { AccessUrl, URL_TYPE } from '@app/unraid-api/graph/resolvers/connect/connect.model.js';

interface UrlForFieldInput {
    url: string;
@@ -126,7 +123,7 @@ export const getServerIps = (
    }

    const errors: Error[] = [];
    const urls: AccessUrlInput[] = [];
    const urls: AccessUrl[] = [];

    try {
        // Default URL
@@ -232,16 +229,5 @@ export const getServerIps = (
        }
    });

    const safeUrls = urls
        .map((url) => AccessUrlInputSchema().safeParse(url))
        .reduce<AccessUrlInput[]>((acc, curr) => {
            if (curr.success) {
                acc.push(curr.data);
            } else {
                errors.push(curr.error);
            }
            return acc;
        }, []);

    return { urls: safeUrls, errors };
    return { urls, errors };
};
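
For context, the block removed above is a common zod partition pattern; a standalone sketch of the same idea, written independently of the store and generated types:

import { z } from 'zod';

// Generic version of the removed safeParse/reduce step: keep values that
// validate against `schema`, and collect a ZodError for each one that fails.
export const partitionValid = <T>(schema: z.ZodType<T>, values: unknown[]) => {
    const valid: T[] = [];
    const errors: z.ZodError[] = [];
    for (const value of values) {
        const result = schema.safeParse(value);
        if (result.success) {
            valid.push(result.data);
        } else {
            errors.push(result.error);
        }
    }
    return { valid, errors };
};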

@@ -1,14 +1,14 @@
import type { RemoteGraphQLEventFragmentFragment } from '@app/graphql/generated/client/graphql.js';
import type { RemoteGraphQlEventFragmentFragment } from '@app/graphql/generated/client/graphql.js';
import { remoteQueryLogger } from '@app/core/log.js';
import { getApiApolloClient } from '@app/graphql/client/api/get-api-client.js';
import { RemoteGraphQLEventType } from '@app/graphql/generated/client/graphql.js';
import { RemoteGraphQlEventType } from '@app/graphql/generated/client/graphql.js';
import { SEND_REMOTE_QUERY_RESPONSE } from '@app/graphql/mothership/mutations.js';
import { parseGraphQLQuery } from '@app/graphql/resolvers/subscription/remote-graphql/remote-graphql-helpers.js';
import { GraphQLClient } from '@app/mothership/graphql-client.js';
import { getters } from '@app/store/index.js';

export const executeRemoteGraphQLQuery = async (
    data: RemoteGraphQLEventFragmentFragment['remoteGraphQLEventData']
    data: RemoteGraphQlEventFragmentFragment['remoteGraphQLEventData']
) => {
    remoteQueryLogger.debug({ query: data }, 'Executing remote query');
    const client = GraphQLClient.getInstance();
@@ -44,7 +44,7 @@ export const executeRemoteGraphQLQuery = async (
            input: {
                sha256: data.sha256,
                body: JSON.stringify({ data: localResult.data }),
                type: RemoteGraphQLEventType.REMOTE_QUERY_EVENT,
                type: RemoteGraphQlEventType.REMOTE_QUERY_EVENT,
            },
        },
        errorPolicy: 'none',
@@ -57,7 +57,7 @@ export const executeRemoteGraphQLQuery = async (
            input: {
                sha256: data.sha256,
                body: JSON.stringify({ errors: localResult.error }),
                type: RemoteGraphQLEventType.REMOTE_QUERY_EVENT,
                type: RemoteGraphQlEventType.REMOTE_QUERY_EVENT,
            },
        },
    });
@@ -70,7 +70,7 @@ export const executeRemoteGraphQLQuery = async (
            input: {
                sha256: data.sha256,
                body: JSON.stringify({ errors: err }),
                type: RemoteGraphQLEventType.REMOTE_QUERY_EVENT,
                type: RemoteGraphQlEventType.REMOTE_QUERY_EVENT,
            },
        },
    });

@@ -1,9 +1,9 @@
import { type RemoteGraphQLEventFragmentFragment } from '@app/graphql/generated/client/graphql.js';
import { type RemoteGraphQlEventFragmentFragment } from '@app/graphql/generated/client/graphql.js';
import { addRemoteSubscription } from '@app/store/actions/add-remote-subscription.js';
import { store } from '@app/store/index.js';

export const createRemoteSubscription = async (
    data: RemoteGraphQLEventFragmentFragment['remoteGraphQLEventData']
    data: RemoteGraphQlEventFragmentFragment['remoteGraphQLEventData']
) => {
    await store.dispatch(addRemoteSubscription(data));
};

@@ -1,65 +0,0 @@
type Permission {
    resource: Resource!
    actions: [String!]!
}

type ApiKey {
    id: ID!
    name: String!
    description: String
    roles: [Role!]!
    createdAt: DateTime!
    permissions: [Permission!]!
}

type ApiKeyWithSecret {
    id: ID!
    key: String!
    name: String!
    description: String
    roles: [Role!]!
    createdAt: DateTime!
    permissions: [Permission!]!
}

input CreateApiKeyInput {
    name: String!
    description: String
    roles: [Role!]
    permissions: [AddPermissionInput!]
    """ This will replace the existing key if one already exists with the same name, otherwise returns the existing key """
    overwrite: Boolean
}

input AddPermissionInput {
    resource: Resource!
    actions: [String!]!
}

input AddRoleForUserInput {
    userId: ID!
    role: Role!
}

input AddRoleForApiKeyInput {
    apiKeyId: ID!
    role: Role!
}

input RemoveRoleFromApiKeyInput {
    apiKeyId: ID!
    role: Role!
}

type Mutation {
    createApiKey(input: CreateApiKeyInput!): ApiKeyWithSecret!
    addPermission(input: AddPermissionInput!): Boolean!
    addRoleForUser(input: AddRoleForUserInput!): Boolean!
    addRoleForApiKey(input: AddRoleForApiKeyInput!): Boolean!
    removeRoleFromApiKey(input: RemoveRoleFromApiKeyInput!): Boolean!
}

type Query {
    apiKeys: [ApiKey!]!
    apiKey(id: ID!): ApiKey
}
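
A sample operation against the removed SDL above (the gql tag from graphql-tag is used purely for illustration); field names come straight from CreateApiKeyInput and ApiKeyWithSecret:

import { gql } from 'graphql-tag';

export const CREATE_API_KEY = gql`
    mutation CreateApiKey($input: CreateApiKeyInput!) {
        createApiKey(input: $input) {
            id
            key
            name
            roles
            permissions {
                resource
                actions
            }
        }
    }
`;

// Example variables: a key holding the "connect" role plus one scoped permission.
export const exampleVariables = {
    input: {
        name: 'automation',
        roles: ['connect'],
        permissions: [{ resource: 'docker', actions: ['read'] }],
        overwrite: false,
    },
};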

@@ -1,42 +0,0 @@
"""
Available resources for permissions
"""
enum Resource {
    api_key
    array
    cloud
    config
    connect
    connect__remote_access
    customizations
    dashboard
    disk
    display
    docker
    flash
    info
    logs
    me
    network
    notifications
    online
    os
    owner
    permission
    registration
    servers
    services
    share
    vars
    vms
    welcome
}

"""
Available roles for API keys and users
"""
enum Role {
    admin
    connect
    guest
}

@@ -1,203 +0,0 @@
type Query {
    """An Unraid array consisting of 1 or 2 Parity disks and a number of Data disks."""
    array: Array!
}

enum ArrayStateInputState {
    """Start array"""
    START
    """Stop array"""
    STOP
}

input ArrayStateInput {
    """Array state"""
    desiredState: ArrayStateInputState!
}

type ArrayMutations {
    """Set array state"""
    setState(input: ArrayStateInput): Array

    """Add new disk to array"""
    addDiskToArray(input: ArrayDiskInput): Array
    """Remove existing disk from array. NOTE: The array must be stopped before running this, otherwise it'll throw an error."""
    removeDiskFromArray(input: ArrayDiskInput): Array

    mountArrayDisk(id: ID!): Disk
    unmountArrayDisk(id: ID!): Disk

    clearArrayDiskStatistics(id: ID!): JSON
}

type Mutation {
    array: ArrayMutations
}

type Subscription {
    array: Array!
}

input ArrayDiskInput {
    """Disk ID"""
    id: ID!
    """The slot for the disk"""
    slot: Int
}

type Array implements Node {
    id: ID!
    """Array state before this query/mutation"""
    previousState: ArrayState
    """Array state after this query/mutation"""
    pendingState: ArrayPendingState
    """Current array state"""
    state: ArrayState!
    """Current array capacity"""
    capacity: ArrayCapacity!
    """Current boot disk"""
    boot: ArrayDisk
    """Parity disks in the current array"""
    parities: [ArrayDisk!]!
    """Data disks in the current array"""
    disks: [ArrayDisk!]!
    """Caches in the current array"""
    caches: [ArrayDisk!]!
}

# /usr/src/linux-5.9.13-Unraid/drivers/md/md_unraid.c
enum ArrayState {
    """Array is running"""
    STARTED
    """Array has stopped"""
    STOPPED
    """Array has new disks"""
    NEW_ARRAY
    """A disk is being reconstructed"""
    RECON_DISK
    """A disk is disabled in the array"""
    DISABLE_DISK
    """Array is disabled"""
    SWAP_DSBL
    """Too many changes to array at the same time"""
    INVALID_EXPANSION
    """Parity isn't the biggest, can't start array"""
    PARITY_NOT_BIGGEST
    """Array has too many missing data disks"""
    TOO_MANY_MISSING_DISKS
    """Array has new disks but they're too small"""
    NEW_DISK_TOO_SMALL
    """Array has no data disks"""
    NO_DATA_DISKS
}

enum ArrayDiskStatus {
    """ no disk present, no disk configured """
    DISK_NP
    """ enabled, disk present, correct, valid """
    DISK_OK
    """ enabled, but missing """
    DISK_NP_MISSING
    """ enabled, disk present, but not valid """
    DISK_INVALID
    """ enabled, disk present, but not the correct disk """
    DISK_WRONG
    """ disabled, old disk still present """
    DISK_DSBL
    """ disabled, no disk present """
    DISK_NP_DSBL
    """ disabled, new disk present """
    DISK_DSBL_NEW
    """ new disk """
    DISK_NEW
}

enum ArrayPendingState {
    """Array is starting"""
    starting
    """Array is stopping"""
    stopping
    """Array has no data disks"""
    no_data_disks
    """Array has too many missing data disks"""
    too_many_missing_disks
}

type ArrayCapacity {
    kilobytes: Capacity!
    disks: Capacity!
}

type Capacity {
    free: String!
    used: String!
    total: String!
}

type ArrayDisk {
    """ Disk identifier, only set for present disks on the system """
    id: ID!
    """ Array slot number. Parity1 is always 0 and Parity2 is always 29. Array slots will be 1 - 28. Cache slots are 30 - 53. Flash is 54. """
    idx: Int!
    name: String
    device: String
    """ (KB) Disk Size total """
    size: Long!
    status: ArrayDiskStatus
    """ Is the disk a HDD or SSD. """
    rotational: Boolean
    """ Disk temp - will be NaN if array is not started or DISK_NP """
    temp: Int
    """Count of I/O read requests sent to the device I/O drivers. These statistics may be cleared at any time."""
    numReads: Long!
    """Count of I/O write requests sent to the device I/O drivers. These statistics may be cleared at any time."""
    numWrites: Long!
    """Number of unrecoverable errors reported by the device I/O drivers. Missing data due to unrecoverable array read errors is filled in on-the-fly using parity reconstruct (and we attempt to write this data back to the sector(s) which failed). Any unrecoverable write error results in disabling the disk."""
    numErrors: Long!
    """ (KB) Total Size of the FS (Not present on Parity type drive) """
    fsSize: Long
    """ (KB) Free Size on the FS (Not present on Parity type drive) """
    fsFree: Long
    """ (KB) Used Size on the FS (Not present on Parity type drive) """
    fsUsed: Long
    exportable: Boolean
    """ Type of Disk - used to differentiate Cache / Flash / Array / Parity """
    type: ArrayDiskType!
    """ (%) Disk space left to warn """
    warning: Int
    """ (%) Disk space left for critical """
    critical: Int
    """ File system type for the disk """
    fsType: String
    """ User comment on disk """
    comment: String
    """ File format (ex MBR: 4KiB-aligned) """
    format: String
    """ ata | nvme | usb | (others) """
    transport: String
}

# type ArrayParityDisk {}
# type ArrayCacheDisk {}

enum ArrayDiskType {
    """Data disk"""
    Data
    """Parity disk"""
    Parity
    """Flash disk"""
    Flash
    """Cache disk"""
    Cache
}

enum ArrayDiskFsColor {
    """Disk is OK and running"""
    green_on
    """Disk is OK and not running"""
    green_off
    yellow_on
    yellow_off
    red_on
    red_off
}
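
Sample operations against the array SDL above (selections mirror the Array, ArrayCapacity, and ArrayDisk definitions; the gql tag is for illustration only):

import { gql } from 'graphql-tag';

export const ARRAY_STATUS = gql`
    query ArrayStatus {
        array {
            id
            state
            capacity {
                kilobytes {
                    free
                    used
                    total
                }
            }
            disks {
                id
                name
                size
                status
                temp
            }
        }
    }
`;

// Start the array via the nested ArrayMutations type.
export const START_ARRAY = gql`
    mutation StartArray {
        array {
            setState(input: { desiredState: START }) {
                id
                state
                pendingState
            }
        }
    }
`;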

@@ -1,26 +0,0 @@
type Query {
    parityHistory: [ParityCheck]
}

type Mutation {
    """Start parity check"""
    startParityCheck(correct: Boolean): JSON
    """Pause parity check"""
    pauseParityCheck: JSON
    """Resume parity check"""
    resumeParityCheck: JSON
    """Cancel parity check"""
    cancelParityCheck: JSON
}

type Subscription {
    parityHistory: ParityCheck!
}

type ParityCheck {
    date: String!
    duration: Int!
    speed: String!
    status: String!
    errors: String!
}
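
Sample operations for the parity SDL above (gql tag for illustration; startParityCheck returns opaque JSON per the schema):

import { gql } from 'graphql-tag';

// Kick off a correcting parity check.
export const START_PARITY_CHECK = gql`
    mutation StartParityCheck {
        startParityCheck(correct: true)
    }
`;

// Fetch previous parity-check runs.
export const PARITY_HISTORY = gql`
    query ParityHistory {
        parityHistory {
            date
            duration
            speed
            status
            errors
        }
    }
`;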

@@ -1,33 +0,0 @@
scalar JSON
scalar Long
scalar UUID
scalar DateTime
scalar Port
scalar URL

type Welcome {
    message: String!
}

type Query {
    # This should always be available even for guest users
    online: Boolean
    info: Info
}

type Mutation {
    login(username: String!, password: String!): String
    shutdown: String
    reboot: String
}

type Subscription {
    ping: String!
    info: Info!
    online: Boolean!
}

# An object with a Globally Unique ID: see https://graphql.org/learn/global-object-identification/
interface Node {
    id: ID!
}
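
Illustrative operations for the base SDL above (names as defined in the schema; the gql tag is an assumption for presentation):

import { gql } from 'graphql-tag';

// login returns a plain String per the schema; its contents are not specified in the SDL.
export const LOGIN = gql`
    mutation Login($username: String!, $password: String!) {
        login(username: $username, password: $password)
    }
`;

// The ping subscription emits String! payloads.
export const PING = gql`
    subscription Ping {
        ping
    }
`;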

@@ -1,43 +0,0 @@
type ApiKeyResponse {
    valid: Boolean!
    error: String
}

enum MinigraphStatus {
    PRE_INIT
    CONNECTING
    CONNECTED
    PING_FAILURE
    ERROR_RETRYING
}

type MinigraphqlResponse {
    status: MinigraphStatus!
    timeout: Int
    error: String
}

type CloudResponse {
    status: String!
    ip: String
    error: String
}

type RelayResponse {
    status: String!
    timeout: String
    error: String
}

type Cloud {
    error: String
    apiKey: ApiKeyResponse!
    relay: RelayResponse
    minigraphql: MinigraphqlResponse!
    cloud: CloudResponse!
    allowedOrigins: [String!]!
}

type Query {
    cloud: Cloud
}
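
A sample health-check query for the cloud SDL above (gql tag for illustration):

import { gql } from 'graphql-tag';

export const CLOUD_STATUS = gql`
    query CloudStatus {
        cloud {
            error
            apiKey {
                valid
                error
            }
            minigraphql {
                status
                timeout
                error
            }
            cloud {
                status
                ip
                error
            }
            allowedOrigins
        }
    }
`;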

@@ -1,13 +0,0 @@
type Config implements Node {
    id: ID!
    valid: Boolean
    error: ConfigErrorState
}

type Query {
    config: Config!
}

type Subscription {
    config: Config!
}
Some files were not shown because too many files have changed in this diff.