Mirror of https://github.com/unraid/api.git, synced 2026-01-02 14:40:01 -06:00

Compare commits: feat/pnpm-... → release-pl... (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | c0fdc0dc48 | |
@@ -1,21 +0,0 @@
{
    "permissions": {
        "allow": [
            "Bash(rg:*)",
            "Bash(find:*)",
            "Bash(pnpm codegen:*)",
            "Bash(pnpm dev:*)",
            "Bash(pnpm build:*)",
            "Bash(pnpm test:*)",
            "Bash(grep:*)",
            "Bash(pnpm type-check:*)",
            "Bash(pnpm lint:*)",
            "Bash(pnpm --filter ./api lint)",
            "Bash(mv:*)",
            "Bash(ls:*)",
            "mcp__ide__getDiagnostics",
            "Bash(pnpm --filter \"*connect*\" test connect-status-writer.service.spec)"
        ]
    },
    "enableAllProjectMcpServers": false
}
@@ -1,13 +0,0 @@
---
description:
globs: api/**/*,api/*
alwaysApply: false
---

* pnpm ONLY
* always run scripts from api/package.json unless requested
* prefer adding new files to the nest repo located at api/src/unraid-api/ instead of the legacy code
* Test suite is VITEST, do not use jest
  pnpm --filter ./api test
* Prefer to not mock simple dependencies
@@ -1,8 +0,0 @@
---
description:
globs:
alwaysApply: true
---
Never add comments unless they are needed for clarity of function

Be CONCISE, keep replies shorter than a paragraph if at all possible.
@@ -1,6 +0,0 @@
---
description:
globs:
alwaysApply: true
---
Never add comments for obvious things, and avoid commenting when starting and ending code blocks
@@ -1,9 +0,0 @@
---
description:
globs: web/**/*
alwaysApply: false
---
* Always run `pnpm codegen` for GraphQL code generation in the web directory
* GraphQL queries must be placed in `.query.ts` files
* GraphQL mutations must be placed in `.mutation.ts` files
* All GraphQL operations under `web/` must follow this naming convention (see the sketch below)
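To illustrate the convention, a minimal sketch of a hypothetical `server-status.query.ts` (the file path, query name, and `graphql-tag` import are assumptions, not taken from this repo):

```typescript
// web/components/ServerStatus/server-status.query.ts (hypothetical file)
import gql from 'graphql-tag';

// Queries live in `.query.ts` files; a mutation would go in a sibling
// `server-status.mutation.ts`. Run `pnpm codegen` after adding or editing
// operations so the generated types stay in sync.
export const SERVER_STATUS_QUERY = gql`
  query ServerStatus {
    online
  }
`;
```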
@@ -1,240 +0,0 @@
---
description:
globs: **/*.test.ts,**/__test__/components/**/*.ts,**/__test__/store/**/*.ts,**/__test__/mocks/**/*.ts
alwaysApply: false
---

## Vue Component Testing Best Practices
- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
- Nuxt is currently set to auto-import, so some vue files may need `computed` or `ref` imported
- Use pnpm when running terminal commands and stay within the web directory.
- The directory for tests is `web/__test__`; to run tests, just run `pnpm test`

### Setup
- Use `mount` from Vue Test Utils for component testing
- Stub complex child components that aren't the focus of the test
- Mock external dependencies and services

```typescript
import { mount } from '@vue/test-utils';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { createTestingPinia } from '@pinia/testing'
import { useSomeStore } from '@/stores/myStore'
import YourComponent from '~/components/YourComponent.vue';

// Mock dependencies
vi.mock('~/helpers/someHelper', () => ({
  SOME_CONSTANT: 'mocked-value',
}));

describe('YourComponent', () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('renders correctly', () => {
    const wrapper = mount(YourComponent, {
      global: {
        plugins: [createTestingPinia()],
        stubs: {
          // Stub child components when needed
          ChildComponent: true,
        },
      },
    });

    const store = useSomeStore() // uses the testing pinia!
    // state can be directly manipulated
    store.name = 'my new name'

    // actions are stubbed by default, meaning they don't execute their code by default.
    // See below to customize this behavior.
    store.someAction()

    expect(store.someAction).toHaveBeenCalledTimes(1)

    // Assertions on components
    expect(wrapper.text()).toContain('Expected content');
  });
});
```

### Testing Patterns
- Test component behavior and output, not implementation details
- Verify that the expected elements are rendered
- Test component interactions (clicks, inputs, etc.)
- Check for expected prop handling and event emissions
- Use `createTestingPinia()` for mocking stores in components

### Finding Elements
- Use semantic queries like `find('button')` or `find('[data-test="id"]')`, but prefer not to use data-test IDs
- Find components with `findComponent(ComponentName)`
- Use `findAll` to check for multiple elements

### Assertions
- Assert on rendered text content with `wrapper.text()`
- Assert on element attributes with `element.attributes()`
- Verify element existence with `expect(element.exists()).toBe(true)`
- Check component state through rendered output (see the sketch below)
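A small sketch combining the two sections above (the component, selectors, and expected text are hypothetical):

```typescript
// StatusBadge and its markup are assumed for illustration.
const wrapper = mount(StatusBadge);

const button = wrapper.find('button'); // prefer semantic selectors over data-test IDs
expect(button.exists()).toBe(true);
expect(button.attributes('disabled')).toBeUndefined();

expect(wrapper.text()).toContain('Online');
expect(wrapper.findAll('li')).toHaveLength(3); // findAll for multiple elements
```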
### Component Interaction
- Trigger events with `await element.trigger('click')`
- Set input values with `await input.setValue('value')`
- Test emitted events with `wrapper.emitted()` (example below)
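For example, with a hypothetical form component (the event name and payload are assumptions):

```typescript
const input = wrapper.find('input');
await input.setValue('my-server');

await wrapper.find('button').trigger('click');

// emitted() maps event names to arrays of emitted argument lists
expect(wrapper.emitted('submit')?.[0]).toEqual(['my-server']);
```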
### Mocking
- Mock external services and API calls
- Prefer not using mocks whenever possible
- Use `vi.mock()` for module-level mocks
- Specify return values for component methods with `vi.spyOn()` (sketch below)
- Reset mocks between tests with `vi.clearAllMocks()`
- Frequently used mocks are stored under `web/__test__/mocks`
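A minimal `vi.spyOn` sketch (the service object and return value are assumptions):

```typescript
// Spy on a method and stub its return value without mocking the whole module.
const api = { fetchStatus: async () => ({ online: false }) };
const statusSpy = vi.spyOn(api, 'fetchStatus').mockResolvedValue({ online: true });

await api.fetchStatus();
expect(statusSpy).toHaveBeenCalledTimes(1);

vi.clearAllMocks(); // reset call history between tests
```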
### Async Testing
- Use `await nextTick()` for DOM updates
- Use `flushPromises()` for more complex promise chains
- Always await async operations before making assertions (see the sketch below)
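A short sketch of both helpers (the rendered text is hypothetical):

```typescript
import { flushPromises } from '@vue/test-utils';
import { nextTick } from 'vue';

// One tick is enough after a plain reactive update
await wrapper.find('button').trigger('click');
await nextTick();
expect(wrapper.text()).toContain('Clicked');

// Flush the whole microtask queue after promise-based work (e.g. a mocked fetch)
await flushPromises();
expect(wrapper.text()).toContain('Loaded');
```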
## Store Testing with Pinia

### Basic Setup
- When testing store files, use `createPinia` and `setActivePinia`

```typescript
import { createPinia, setActivePinia } from 'pinia';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { useYourStore } from '~/store/your-store';

// Mock declarations must be at top level due to hoisting
const mockDependencyFn = vi.fn();

// Module mocks must use factory functions
vi.mock('~/store/dependency', () => ({
  useDependencyStore: () => ({
    someMethod: mockDependencyFn,
    someProperty: 'mockValue'
  })
}));

describe('Your Store', () => {
  let store: ReturnType<typeof useYourStore>;

  beforeEach(() => {
    setActivePinia(createPinia());
    store = useYourStore();
    vi.clearAllMocks();
  });

  afterEach(() => {
    vi.resetAllMocks();
  });

  it('tests some action', () => {
    store.someAction();
    expect(mockDependencyFn).toHaveBeenCalled();
  });
});
```

### Important Guidelines
1. **Store Initialization**
   - Use `createPinia()` instead of `createTestingPinia()` for most cases
   - Only use `createTestingPinia` if you specifically need its testing features
   - Let stores initialize with their natural default state instead of forcing initial state
   - Do not mock the store under test; that's why we use `createPinia()`

2. **Vue Reactivity**
   - Ensure Vue reactivity imports are present in the store files themselves; they may be missing because Nuxt auto-import was enabled
   - Don't rely on Nuxt auto-imports in tests

```typescript
// Required in store files, even with Nuxt auto-imports
import { computed, ref, watchEffect } from 'vue';
```

3. **Mocking Best Practices**
   - Place all mock declarations at the top level
   - Use factory functions for module mocks to avoid hoisting issues

```typescript
// ❌ Wrong - will cause hoisting issues
const mockFn = vi.fn();
vi.mock('module', () => ({ method: mockFn }));

// ✅ Correct - using factory function
vi.mock('module', () => {
  const mockFn = vi.fn();
  return { method: mockFn };
});
```

4. **Testing Actions**
   - Test action side effects and state changes
   - Verify actions are called with correct parameters
   - Mock external dependencies appropriately

```typescript
it('should handle action correctly', () => {
  store.yourAction();
  expect(mockDependencyFn).toHaveBeenCalledWith(
    expectedArg1,
    expectedArg2
  );
  expect(store.someState).toBe(expectedValue);
});
```

5. **Common Pitfalls**
   - Don't mix mock declarations and module mocks incorrectly
   - Avoid relying on Nuxt's auto-imports in the test environment
   - Clear mocks between tests to ensure isolation
   - Remember that `vi.mock()` calls are hoisted

### Testing State & Getters
- Test computed properties by accessing them directly
- Verify state changes after actions
- Test getter dependencies are properly mocked

```typescript
it('computes derived state correctly', () => {
  store.setState('new value');
  expect(store.computedValue).toBe('expected result');
});
```

### Testing Complex Interactions
- Test store interactions with other stores
- Verify proper error handling
- Test async operations completely

```typescript
it('handles async operations', async () => {
  const promise = store.asyncAction();
  expect(store.status).toBe('loading');
  await promise;
  expect(store.status).toBe('success');
});
```

### Testing Actions
- Verify actions are called with the right parameters
- Test action side effects if not stubbed
- Override specific action implementations when needed

```typescript
// Test action calls
store.yourAction(params);
expect(store.yourAction).toHaveBeenCalledWith(params);

// Test with real implementation
const pinia = createTestingPinia({
  createSpy: vi.fn,
  stubActions: false,
});
```

### Testing State & Getters
- Set initial state for focused testing
- Test computed properties by accessing them directly
- Verify state changes by updating the store (see the sketch below)
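A short sketch of this pattern (the `servers` state and `onlineCount` getter are assumptions):

```typescript
it('updates a getter when state changes', () => {
  // Set initial state directly for a focused test
  store.servers = [{ name: 'tower', online: true }];
  expect(store.onlineCount).toBe(1);

  // Update the store and verify the getter reacts
  store.servers.push({ name: 'backup', online: false });
  expect(store.onlineCount).toBe(1);
});
```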
21 .github/CODEOWNERS vendored
@@ -1,20 +1 @@
# Default owners for everything in the repo
* @elibosley @pujitm @mdatelle @zackspear

# API specific files
/api/ @elibosley @pujitm @mdatelle

# Web frontend files
/web/ @elibosley @mdatelle @zackspear

# Plugin related files
/plugin/ @elibosley

# Unraid UI specific files
/unraid-ui/ @mdatelle @zackspear @pujitm

# GitHub workflows and configuration
/.github/ @elibosley

# Documentation
*.md @elibosley @pujitm @mdatelle @zackspear
@elibosley @pujitm @mdatelle @zackspear
2 .github/ISSUE_TEMPLATE/work_intent.md vendored
@@ -3,7 +3,7 @@ name: Work Intent
about: Request approval for planned development work (must be approved before starting)
title: 'Work Intent: '
labels: work-intent, unapproved
assignees: 'elibosley'
assignees: ''
---

<!--
49 .github/codeql/README.md vendored
@@ -1,49 +0,0 @@
# CodeQL Security Analysis for Unraid API

This directory contains custom CodeQL queries and configurations for security analysis of the Unraid API codebase.

## Overview

The analysis is configured to run:
- On all pushes to the main branch
- On all pull requests
- Weekly via scheduled runs

## Custom Queries

The following custom queries are implemented:

1. **API Authorization Bypass Detection**
   Identifies API handlers that may not properly check authorization before performing operations.

2. **GraphQL Injection Detection**
   Detects potential injection vulnerabilities in GraphQL queries and operations.

3. **Hardcoded Secrets Detection**
   Finds potential hardcoded secrets or credentials in the codebase.

4. **Insecure Cryptographic Implementations**
   Identifies usage of weak cryptographic algorithms or insecure random number generation.

5. **Path Traversal Vulnerability Detection**
   Detects potential path traversal vulnerabilities in file system operations.

## Configuration

The CodeQL analysis is configured in:
- `.github/workflows/codeql-analysis.yml` - Workflow configuration
- `.github/codeql/codeql-config.yml` - CodeQL engine configuration

## Running Locally

To run these queries locally:

1. Install the CodeQL CLI: https://github.com/github/codeql-cli-binaries/releases
2. Create a CodeQL database:
   ```
   codeql database create <db-name> --language=javascript --source-root=.
   ```
3. Run a query:
   ```
   codeql query run .github/codeql/custom-queries/javascript/api-auth-bypass.ql --database=<db-name>
   ```
16 .github/codeql/codeql-config.yml vendored
@@ -1,16 +0,0 @@
name: "Unraid API CodeQL Configuration"

disable-default-queries: false

queries:
  - name: Extended Security Queries
    uses: security-extended
  - name: Custom Unraid API Queries
    uses: ./.github/codeql/custom-queries

query-filters:
  - exclude:
      problem.severity:
        - warning
        - recommendation
      tags contain: security
@@ -1,45 +0,0 @@
/**
 * @name Potential API Authorization Bypass
 * @description Functions that process API requests without verifying authorization may lead to security vulnerabilities.
 * @kind problem
 * @problem.severity error
 * @precision medium
 * @id js/api-auth-bypass
 * @tags security
 *       external/cwe/cwe-285
 */

import javascript

/**
 * Identifies functions that appear to handle API requests
 */
predicate isApiHandler(Function f) {
  exists(f.getAParameter()) and
  (
    f.getName().regexpMatch("(?i).*(api|handler|controller|resolver|endpoint).*") or
    exists(CallExpr call |
      call.getCalleeName().regexpMatch("(?i).*(get|post|put|delete|patch).*") and
      call.getArgument(1) = f
    )
  )
}

/**
 * Identifies expressions that appear to perform authorization checks
 */
predicate isAuthCheck(DataFlow::Node node) {
  exists(CallExpr call |
    call.getCalleeName().regexpMatch("(?i).*(authorize|authenticate|isAuth|checkAuth|verifyAuth|hasPermission|isAdmin|canAccess).*") and
    call.flow().getASuccessor*() = node
  )
}

from Function apiHandler
where
  isApiHandler(apiHandler) and
  not exists(DataFlow::Node authCheck |
    isAuthCheck(authCheck) and
    authCheck.getEnclosingExpr().getEnclosingFunction() = apiHandler
  )
select apiHandler, "API handler function may not perform proper authorization checks."
@@ -1,77 +0,0 @@
/**
 * @name Potential GraphQL Injection
 * @description User-controlled input used directly in GraphQL queries may lead to injection vulnerabilities.
 * @kind path-problem
 * @problem.severity error
 * @precision high
 * @id js/graphql-injection
 * @tags security
 *       external/cwe/cwe-943
 */

import javascript
import DataFlow::PathGraph

class GraphQLQueryExecution extends DataFlow::CallNode {
  GraphQLQueryExecution() {
    exists(string name |
      name = this.getCalleeName() and
      (
        name = "execute" or
        name = "executeQuery" or
        name = "query" or
        name.regexpMatch("(?i).*graphql.*query.*")
      )
    )
  }

  DataFlow::Node getQuery() {
    result = this.getArgument(0)
  }
}

class UserControlledInput extends DataFlow::Node {
  UserControlledInput() {
    exists(DataFlow::ParameterNode param |
      param.getName().regexpMatch("(?i).*(query|request|input|args|variables|params).*") and
      this = param
    )
    or
    exists(DataFlow::PropRead prop |
      prop.getPropertyName().regexpMatch("(?i).*(query|request|input|args|variables|params).*") and
      this = prop
    )
  }
}

/**
 * Holds if `node` is a string concatenation.
 */
predicate isStringConcatenation(DataFlow::Node node) {
  exists(BinaryExpr concat |
    concat.getOperator() = "+" and
    concat.flow() = node
  )
}

class GraphQLInjectionConfig extends TaintTracking::Configuration {
  GraphQLInjectionConfig() { this = "GraphQLInjectionConfig" }

  override predicate isSource(DataFlow::Node source) {
    source instanceof UserControlledInput
  }

  override predicate isSink(DataFlow::Node sink) {
    exists(GraphQLQueryExecution exec | sink = exec.getQuery())
  }

  override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
    // Add any GraphQL-specific taint steps if needed
    isStringConcatenation(succ) and
    succ.(DataFlow::BinaryExprNode).getAnOperand() = pred
  }
}

from GraphQLInjectionConfig config, DataFlow::PathNode source, DataFlow::PathNode sink
where config.hasFlowPath(source, sink)
select sink.getNode(), source, sink, "GraphQL query may contain user-controlled input from $@.", source.getNode(), "user input"
@@ -1,53 +0,0 @@
/**
 * @name Hardcoded Secrets
 * @description Hardcoded secrets or credentials in source code can lead to security vulnerabilities.
 * @kind problem
 * @problem.severity error
 * @precision medium
 * @id js/hardcoded-secrets
 * @tags security
 *       external/cwe/cwe-798
 */

import javascript

/**
 * Identifies variable declarations or assignments that may contain secrets
 */
predicate isSensitiveAssignment(DataFlow::Node node) {
  exists(DataFlow::PropWrite propWrite |
    propWrite.getPropertyName().regexpMatch("(?i).*(secret|key|password|token|credential|auth).*") and
    propWrite.getRhs() = node
  )
  or
  exists(VariableDeclarator decl |
    decl.getName().regexpMatch("(?i).*(secret|key|password|token|credential|auth).*") and
    decl.getInit().flow() = node
  )
}

/**
 * Identifies literals that look like secrets
 */
predicate isSecretLiteral(StringLiteral literal) {
  // Match alphanumeric strings of moderate length that may be secrets
  literal.getValue().regexpMatch("[A-Za-z0-9_\\-]{8,}") and
  not (
    // Skip likely non-sensitive literals
    literal.getValue().regexpMatch("(?i)^(true|false|null|undefined|localhost|development|production|staging)$") or
    // Skip URLs without credentials
    literal.getValue().regexpMatch("^https?://[^:@/]+")
  )
}

from DataFlow::Node source
where
  isSensitiveAssignment(source) and
  (
    exists(StringLiteral literal |
      literal.flow() = source and
      isSecretLiteral(literal)
    )
  )
select source, "This assignment may contain a hardcoded secret or credential."
@@ -1,90 +0,0 @@
/**
 * @name Insecure Cryptographic Implementation
 * @description Usage of weak cryptographic algorithms or improper implementations can lead to security vulnerabilities.
 * @kind problem
 * @problem.severity error
 * @precision high
 * @id js/insecure-crypto
 * @tags security
 *       external/cwe/cwe-327
 */

import javascript

/**
 * Identifies calls to crypto functions with insecure algorithms
 */
predicate isInsecureCryptoCall(CallExpr call) {
  // Node.js crypto module uses
  exists(string methodName |
    methodName = call.getCalleeName() and
    (
      // Detect MD5 usage
      methodName.regexpMatch("(?i).*md5.*") or
      methodName.regexpMatch("(?i).*sha1.*") or
      // Insecure crypto constructors
      (
        methodName = "createHash" or
        methodName = "createCipheriv" or
        methodName = "createDecipher"
      ) and
      (
        exists(StringLiteral algo |
          algo = call.getArgument(0) and
          (
            algo.getValue().regexpMatch("(?i).*(md5|md4|md2|sha1|des|rc4|blowfish).*") or
            algo.getValue().regexpMatch("(?i).*(ecb).*") // ECB mode
          )
        )
      )
    )
  )
  or
  // Browser crypto API uses
  exists(MethodCallExpr mce, string propertyName |
    propertyName = mce.getMethodName() and
    (
      propertyName = "subtle" and
      exists(MethodCallExpr subtleCall |
        subtleCall.getReceiver() = mce and
        subtleCall.getMethodName() = "encrypt" and
        exists(ObjectExpr obj |
          obj = subtleCall.getArgument(0) and
          exists(Property p |
            p = obj.getAProperty() and
            p.getName() = "name" and
            exists(StringLiteral algo |
              algo = p.getInit() and
              algo.getValue().regexpMatch("(?i).*(rc4|des|aes-cbc).*")
            )
          )
        )
      )
    )
  )
}

/**
 * Identifies usage of Math.random() for security-sensitive operations
 */
predicate isInsecureRandomCall(CallExpr call) {
  exists(PropertyAccess prop |
    prop.getPropertyName() = "random" and
    prop.getBase().toString() = "Math" and
    call.getCallee() = prop
  )
}

from Expr insecureExpr, string message
where
  (
    insecureExpr instanceof CallExpr and
    isInsecureCryptoCall(insecureExpr) and
    message = "Using potentially insecure cryptographic algorithm or mode."
  ) or (
    insecureExpr instanceof CallExpr and
    isInsecureRandomCall(insecureExpr) and
    message = "Using Math.random() for security-sensitive operation. Consider using crypto.getRandomValues() instead."
  )
select insecureExpr, message
@@ -1,130 +0,0 @@
/**
 * @name Path Traversal Vulnerability
 * @description User-controlled inputs used in file operations may allow for path traversal attacks.
 * @kind path-problem
 * @problem.severity error
 * @precision high
 * @id js/path-traversal
 * @tags security
 *       external/cwe/cwe-22
 */

import javascript
import DataFlow::PathGraph

/**
 * Identifies sources of user-controlled input
 */
class UserInput extends DataFlow::Node {
  UserInput() {
    // HTTP request parameters
    exists(DataFlow::ParameterNode param |
      param.getName().regexpMatch("(?i).*(req|request|param|query|body|user|input).*") and
      this = param
    )
    or
    // Access to common request properties
    exists(DataFlow::PropRead prop |
      (
        prop.getPropertyName() = "query" or
        prop.getPropertyName() = "body" or
        prop.getPropertyName() = "params" or
        prop.getPropertyName() = "files"
      ) and
      this = prop
    )
  }
}

/**
 * Identifies fs module imports
 */
class FileSystemAccess extends DataFlow::CallNode {
  FileSystemAccess() {
    // Node.js fs module functions
    exists(string name |
      name = this.getCalleeName() and
      (
        name = "readFile" or
        name = "readFileSync" or
        name = "writeFile" or
        name = "writeFileSync" or
        name = "appendFile" or
        name = "appendFileSync" or
        name = "createReadStream" or
        name = "createWriteStream" or
        name = "openSync" or
        name = "open"
      )
    )
    or
    // File system operations via require('fs')
    exists(DataFlow::SourceNode fsModule, string methodName |
      (fsModule.getAPropertyRead("promises") or fsModule).flowsTo(this.getReceiver()) and
      methodName = this.getMethodName() and
      (
        methodName = "readFile" or
        methodName = "writeFile" or
        methodName = "appendFile" or
        methodName = "readdir" or
        methodName = "stat"
      )
    )
  }

  DataFlow::Node getPathArgument() {
    result = this.getArgument(0)
  }
}

/**
 * Identifies sanitization of file paths
 */
predicate isPathSanitized(DataFlow::Node node) {
  // Check for path normalization or validation
  exists(DataFlow::CallNode call |
    (
      call.getCalleeName() = "resolve" or
      call.getCalleeName() = "normalize" or
      call.getCalleeName() = "isAbsolute" or
      call.getCalleeName() = "relative" or
      call.getCalleeName().regexpMatch("(?i).*(sanitize|validate|check).*path.*")
    ) and
    call.flowsTo(node)
  )
  or
  // Check for path traversal mitigation patterns
  exists(DataFlow::CallNode call |
    call.getCalleeName() = "replace" and
    exists(StringLiteral regex |
      regex = call.getArgument(0).(DataFlow::RegExpCreationNode).getSource().getAChildExpr() and
      regex.getValue().regexpMatch("(\\.\\./|\\.\\.\\\\)")
    ) and
    call.flowsTo(node)
  )
}

/**
 * Configuration for tracking flow from user input to file system operations
 */
class PathTraversalConfig extends TaintTracking::Configuration {
  PathTraversalConfig() { this = "PathTraversalConfig" }

  override predicate isSource(DataFlow::Node source) {
    source instanceof UserInput
  }

  override predicate isSink(DataFlow::Node sink) {
    exists(FileSystemAccess fileAccess |
      sink = fileAccess.getPathArgument()
    )
  }

  override predicate isSanitizer(DataFlow::Node node) {
    isPathSanitized(node)
  }
}

from PathTraversalConfig config, DataFlow::PathNode source, DataFlow::PathNode sink
where config.hasFlowPath(source, sink)
select sink.getNode(), source, sink, "File system operation depends on a user-controlled value $@.", source.getNode(), "user input"
183 .github/workflows/build-plugin.yml vendored
@@ -1,183 +0,0 @@
name: Build Plugin Component

on:
  workflow_call:
    inputs:
      RELEASE_CREATED:
        type: string
        required: true
        description: "Whether a release was created"
      RELEASE_TAG:
        type: string
        required: false
        description: "Name of the tag when a release is created"
      TAG:
        type: string
        required: false
        description: "Tag for the build (e.g. PR number or version)"
      BUCKET_PATH:
        type: string
        required: true
        description: "Path in the bucket where artifacts should be stored"
      BASE_URL:
        type: string
        required: true
        description: "Base URL for the plugin builds"
      BUILD_NUMBER:
        type: string
        required: true
        description: "Build number for the plugin builds"
    secrets:
      CF_ACCESS_KEY_ID:
        required: true
      CF_SECRET_ACCESS_KEY:
        required: true
      CF_BUCKET_PREVIEW:
        required: true
      CF_ENDPOINT:
        required: true

jobs:
  build-plugin:
    name: Build and Deploy Plugin
    defaults:
      run:
        working-directory: plugin
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install Node
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - name: Get API Version
        id: vars
        run: |
          GIT_SHA=$(git rev-parse --short HEAD)
          IS_TAGGED=$(git describe --tags --abbrev=0 --exact-match || echo '')
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
          echo "API_VERSION=${API_VERSION}" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/connect-plugin

      - name: Download Unraid UI Components
        uses: actions/download-artifact@v4
        with:
          name: unraid-wc-ui
          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/uui
          merge-multiple: true

      - name: Download Unraid Web Components
        uses: actions/download-artifact@v4
        with:
          pattern: unraid-wc-rich
          path: ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components/nuxt
          merge-multiple: true

      - name: Download Unraid API
        uses: actions/download-artifact@v4
        with:
          name: unraid-api
          path: ${{ github.workspace }}/plugin/api/

      - name: Extract Unraid API
        run: |
          mkdir -p ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api
          tar -xzf ${{ github.workspace }}/plugin/api/unraid-api.tgz -C ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api

      - name: Build Plugin and TXZ Based on Event and Tag
        id: build-plugin
        run: |
          cd ${{ github.workspace }}/plugin
          pnpm run build:txz --tag="${{ inputs.TAG }}" --base-url="${{ inputs.BASE_URL }}" --api-version="${{ steps.vars.outputs.API_VERSION }}" --build-number="${{ inputs.BUILD_NUMBER }}"
          pnpm run build:plugin --tag="${{ inputs.TAG }}" --base-url="${{ inputs.BASE_URL }}" --api-version="${{ steps.vars.outputs.API_VERSION }}" --build-number="${{ inputs.BUILD_NUMBER }}"

      - name: Ensure Plugin Files Exist
        run: |
          ls -al ./deploy
          if [ ! -f ./deploy/*.plg ]; then
            echo "Error: .plg file not found in plugin/deploy/"
            exit 1
          fi

          if [ ! -f ./deploy/*.txz ]; then
            echo "Error: .txz file not found in plugin/deploy/"
            exit 1
          fi

      - name: Upload to GHA
        uses: actions/upload-artifact@v4
        with:
          name: unraid-plugin-${{ github.run_id }}-${{ inputs.RELEASE_TAG }}
          path: plugin/deploy/

      - name: Upload Release Assets
        if: inputs.RELEASE_CREATED == 'true'
        env:
          GITHUB_TOKEN: ${{ github.token }}
          RELEASE_TAG: ${{ inputs.RELEASE_TAG }}
        run: |
          # For each file in release directory
          for file in deploy/*; do
            echo "Uploading $file to release..."
            gh release upload "${RELEASE_TAG}" "$file" --clobber
          done

      - name: Workflow Dispatch and wait
        if: inputs.RELEASE_CREATED == 'true'
        uses: the-actions-org/workflow-dispatch@v4.0.0
        with:
          workflow: release-production.yml
          inputs: '{ "version": "${{ steps.vars.outputs.API_VERSION }}" }'
          token: ${{ secrets.WORKFLOW_TRIGGER_PAT }}

      - name: Upload to Cloudflare
        if: inputs.RELEASE_CREATED == 'false'
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: auto
        run: |
          # Sync the deploy directory to the Cloudflare bucket with explicit content encoding and public-read ACL
          aws s3 sync deploy/ s3://${{ secrets.CF_BUCKET_PREVIEW }}/${{ inputs.BUCKET_PATH }} \
            --endpoint-url ${{ secrets.CF_ENDPOINT }} \
            --checksum-algorithm CRC32 \
            --no-guess-mime-type \
            --content-encoding none \
            --acl public-read

      - name: Comment URL
        if: github.event_name == 'pull_request'
        uses: thollander/actions-comment-pull-request@v3
        with:
          comment-tag: prlink
          mode: recreate
          message: |
            This plugin has been deployed to Cloudflare R2 and is available for testing.
            Download it at this URL:
            ```
            ${{ inputs.BASE_URL }}/tag/${{ inputs.TAG }}/dynamix.unraid.net.plg
            ```
40 .github/workflows/codeql-analysis.yml vendored
@@ -1,40 +0,0 @@
name: "CodeQL Security Analysis"

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
  schedule:
    - cron: '0 0 * * 0' # Run weekly on Sundays

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'javascript', 'typescript' ]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/codeql-config.yml
          queries: +security-and-quality

      - name: Autobuild
        uses: github/codeql-action/autobuild@v3

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
76 .github/workflows/deploy-storybook.yml vendored
@@ -1,76 +0,0 @@
name: Deploy Storybook to Cloudflare Workers
permissions:
  contents: read
  pull-requests: write
  issues: write
on:
  push:
    branches:
      - main
    paths:
      - 'unraid-ui/**'
  pull_request:
    paths:
      - 'unraid-ui/**'
  workflow_dispatch:

jobs:
  deploy:
    runs-on: ubuntu-latest
    name: Deploy Storybook
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22.17.0'

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
          version: 1.0

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Build Storybook
        run: |
          cd unraid-ui
          pnpm build-storybook

      - name: Deploy to Cloudflare Workers (Staging)
        id: deploy_staging
        if: github.event_name == 'pull_request'
        uses: cloudflare/wrangler-action@v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_DEPLOY_TOKEN }}
          command: deploy --env staging
          workingDirectory: unraid-ui

      - name: Deploy to Cloudflare Workers (Production)
        if: github.ref == 'refs/heads/main' && github.event_name == 'push'
        uses: cloudflare/wrangler-action@v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_DEPLOY_TOKEN }}
          command: deploy
          workingDirectory: unraid-ui

      - name: Comment PR with deployment URL
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `🚀 Storybook has been deployed to staging: ${{ steps.deploy_staging.outputs['deployment-url'] }}`
            })
381 .github/workflows/main.yml vendored
@@ -12,25 +12,19 @@ concurrency:

jobs:
  release-please:
    name: Release Please
    # Only run release-please on pushes to main
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        # Only run release-please on pushes to main
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'

      - id: release
        uses: googleapis/release-please-action@v4
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
    outputs:
      releases_created: ${{ steps.release.outputs.releases_created || 'false' }}
      tag_name: ${{ steps.release.outputs.tag_name || '' }}
      releases_created: ${{ steps.release.outputs.releases_created }}
      tag_name: ${{ steps.release.outputs.tag_name }}
  test-api:
    name: Test API
    defaults:
      run:
        working-directory: api
@@ -45,9 +39,9 @@ jobs:
          node-version-file: ".nvmrc"

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential libvirt-daemon-system
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0

      - name: Install pnpm
@@ -75,89 +69,12 @@ jobs:
      - name: Lint
        run: pnpm run lint

      - name: Type Check
        run: pnpm run type-check

      - name: Setup libvirt
        run: |
          # Create required groups (if they don't already exist)
          sudo groupadd -f libvirt
          sudo groupadd -f kvm

          # Create libvirt user if not present, and add it to the kvm group
          sudo useradd -m -s /bin/bash -g libvirt libvirt || true
          sudo usermod -aG kvm libvirt || true

          # Set up libvirt directories and permissions
          sudo mkdir -p /var/run/libvirt /var/log/libvirt /etc/libvirt
          sudo chown root:libvirt /var/run/libvirt /var/log/libvirt
          sudo chmod g+w /var/run/libvirt /var/log/libvirt

          # Configure libvirt by appending required settings
          sudo tee -a /etc/libvirt/libvirtd.conf > /dev/null <<EOF
          unix_sock_group = "libvirt"
          unix_sock_rw_perms = "0770"
          auth_unix_rw = "none"
          EOF

          # Add the current user to libvirt and kvm groups (note: this change won't apply to the current session)
          sudo usermod -aG libvirt,kvm $USER

          sudo mkdir -p /var/run/libvirt
          sudo chown root:libvirt /var/run/libvirt
          sudo chmod 775 /var/run/libvirt

          # Start libvirtd in the background
          sudo /usr/sbin/libvirtd --daemon

          # Wait a bit longer for libvirtd to start
          sleep 5

          # Verify libvirt is running using sudo to bypass group membership delays
          sudo virsh list --all || true

      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest

      - name: Run Tests Concurrently
        run: |
          set -e

          # Run all tests in parallel with labeled output
          echo "🚀 Starting API coverage tests..."
          pnpm run coverage > api-test.log 2>&1 &
          API_PID=$!

          echo "🚀 Starting Connect plugin tests..."
          (cd ../packages/unraid-api-plugin-connect && pnpm test) > connect-test.log 2>&1 &
          CONNECT_PID=$!

          echo "🚀 Starting Shared package tests..."
          (cd ../packages/unraid-shared && pnpm test) > shared-test.log 2>&1 &
          SHARED_PID=$!

          # Wait for all processes and capture exit codes
          wait $API_PID && echo "✅ API tests completed" || { echo "❌ API tests failed"; API_EXIT=1; }
          wait $CONNECT_PID && echo "✅ Connect tests completed" || { echo "❌ Connect tests failed"; CONNECT_EXIT=1; }
          wait $SHARED_PID && echo "✅ Shared tests completed" || { echo "❌ Shared tests failed"; SHARED_EXIT=1; }

          # Display all outputs
          echo "📋 API Test Results:" && cat api-test.log
          echo "📋 Connect Plugin Test Results:" && cat connect-test.log
          echo "📋 Shared Package Test Results:" && cat shared-test.log

          # Exit with error if any test failed
          if [[ ${API_EXIT:-0} -eq 1 || ${CONNECT_EXIT:-0} -eq 1 || ${SHARED_EXIT:-0} -eq 1 ]]; then
            exit 1
          fi
      - name: Test
        run: pnpm run coverage

  build-api:
    name: Build API
    name: Build and Test API
    runs-on: ubuntu-latest
    outputs:
      build_number: ${{ steps.buildnumber.outputs.build_number }}
    defaults:
      run:
        working-directory: api
@@ -190,7 +107,7 @@ jobs:
            ${{ runner.os }}-pnpm-store-

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0
@@ -200,6 +117,13 @@ jobs:
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile

      - name: Lint
        run: pnpm run lint

      - name: Type Check
        run: pnpm run type-check
        continue-on-error: true

      - name: Build
        run: pnpm run build

@@ -211,26 +135,15 @@ jobs:
          PACKAGE_LOCK_VERSION=$(jq -r '.version' package.json)
          API_VERSION=$([[ -n "$IS_TAGGED" ]] && echo "$PACKAGE_LOCK_VERSION" || echo "${PACKAGE_LOCK_VERSION}+${GIT_SHA}")
          export API_VERSION
          echo "API_VERSION=${API_VERSION}" >> $GITHUB_ENV
          echo "PACKAGE_LOCK_VERSION=${PACKAGE_LOCK_VERSION}" >> $GITHUB_OUTPUT

      - name: Generate build number
        id: buildnumber
        uses: onyxmueller/build-tag-number@v1
        with:
          token: ${{secrets.github_token}}
          prefix: ${{steps.vars.outputs.PACKAGE_LOCK_VERSION}}

      - name: Build
        run: |
          pnpm run build:release
          tar -czf deploy/unraid-api.tgz -C deploy/pack/ .
        run: pnpm run build-and-pack

      - name: Upload tgz to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: unraid-api
          path: ${{ github.workspace }}/api/deploy/unraid-api.tgz
          path: ${{ github.workspace }}/api/deploy/release/*.tgz

  build-unraid-ui-webcomponents:
    name: Build Unraid UI Library (Webcomponent Version)
@@ -267,7 +180,7 @@ jobs:
            ${{ runner.os }}-pnpm-store-

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
        with:
          packages: bash procps python3 libvirt-dev jq zstd git build-essential
          version: 1.0
@@ -277,9 +190,6 @@ jobs:
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/ui

      - name: Lint
        run: pnpm run lint

      - name: Build
        run: pnpm run build:wc

@@ -287,11 +197,13 @@ jobs:
        uses: actions/upload-artifact@v4
        with:
          name: unraid-wc-ui
          path: unraid-ui/dist-wc/
          path: unraid-ui/dist/

  build-web:
    # needs: [build-unraid-ui]
    name: Build Web App
    environment:
      name: production
    defaults:
      run:
        working-directory: web
@@ -303,10 +215,10 @@ jobs:
      - name: Create env file
        run: |
          touch .env
          echo VITE_ACCOUNT=${{ secrets.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ secrets.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ secrets.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ secrets.VITE_CALLBACK_KEY }} >> .env
          echo VITE_ACCOUNT=${{ vars.VITE_ACCOUNT }} >> .env
          echo VITE_CONNECT=${{ vars.VITE_CONNECT }} >> .env
          echo VITE_UNRAID_NET=${{ vars.VITE_UNRAID_NET }} >> .env
          echo VITE_CALLBACK_KEY=${{ vars.VITE_CALLBACK_KEY }} >> .env
          cat .env

      - name: Install Node
@@ -342,13 +254,10 @@ jobs:
        run: |
          cd ${{ github.workspace }}/unraid-ui
          pnpm run build

      - name: Lint files
        continue-on-error: true
        run: pnpm run lint

      - name: Type Check
        run: pnpm run type-check

      - name: Test
        run: pnpm run test:ci

@@ -361,46 +270,196 @@ jobs:
          name: unraid-wc-rich
          path: web/.nuxt/nuxt-custom-elements/dist/unraid-components

  build-plugin-staging-pr:
    name: Build and Deploy Plugin
    needs:
      - release-please
      - build-api
      - build-web
      - build-unraid-ui-webcomponents
      - test-api
    uses: ./.github/workflows/build-plugin.yml
    with:
      RELEASE_CREATED: false
      TAG: ${{ github.event.pull_request.number && format('PR{0}', github.event.pull_request.number) || '' }}
      BUCKET_PATH: ${{ github.event.pull_request.number && format('unraid-api/tag/PR{0}', github.event.pull_request.number) || 'unraid-api' }}
      BASE_URL: "https://preview.dl.unraid.net/unraid-api"
      BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
    secrets:
      CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
      CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
      CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
      CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
  build-plugin:
    needs: [build-api, build-web, build-unraid-ui-webcomponents]
    outputs:
      tag: ${{ steps.build-plugin.outputs.tag }}
    defaults:
      run:
        working-directory: plugin
    runs-on: ubuntu-latest
    steps:
      - name: Set Timezone
        uses: szenius/set-timezone@v1.2
        with:
          timezoneLinux: "America/Los_Angeles"
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

  build-plugin-production:
    if: ${{ needs.release-please.outputs.releases_created == 'true' }}
    name: Build and Deploy Production Plugin
    needs:
      - release-please
      - build-api
      - build-web
      - build-unraid-ui-webcomponents
      - test-api
    uses: ./.github/workflows/build-plugin.yml
    with:
      RELEASE_CREATED: true
      RELEASE_TAG: ${{ needs.release-please.outputs.tag_name }}
      TAG: ""
      BUCKET_PATH: unraid-api
      BASE_URL: "https://stable.dl.unraid.net/unraid-api"
      BUILD_NUMBER: ${{ needs.build-api.outputs.build_number }}
    secrets:
      CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
      CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
      CF_BUCKET_PREVIEW: ${{ secrets.CF_BUCKET_PREVIEW }}
      CF_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
      - name: Install Node
        uses: actions/setup-node@v4
        with:
          node-version-file: ".nvmrc"

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Install dependencies
        run: |
          cd ${{ github.workspace }}
          pnpm install --frozen-lockfile --filter @unraid/connect-plugin

      - name: Download Unraid Web Components
        uses: actions/download-artifact@v4
        with:
          pattern: unraid-wc-*
          path: ./plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/unraid-components
          merge-multiple: true

      - name: Download Unraid API
        uses: actions/download-artifact@v4
        with:
          name: unraid-api
          path: /tmp/unraid-api/

      - name: Extract Unraid API and Build Plugin
        id: build-plugin
        run: |
          tar -xzf /tmp/unraid-api/unraid-api.tgz -C ${{ github.workspace }}/plugin/source/dynamix.unraid.net/usr/local/unraid-api
          cd ${{ github.workspace }}/plugin

          if [ -n "${{ github.event.pull_request.number }}" ]; then
            export TAG=PR${{ github.event.pull_request.number }}
            # Put tag into github env
            echo "TAG=${TAG}" >> $GITHUB_OUTPUT
          fi

          pnpm run build

      - name: Upload binary txz and plg to Github artifacts
        uses: actions/upload-artifact@v4
        with:
          name: connect-files
          path: |
            plugin/deploy/release/plugins/
            plugin/deploy/release/archive/*.txz
          retention-days: 5
          if-no-files-found: error

  release-pull-request:
    if: |
      github.event_name == 'pull_request'
    runs-on: ubuntu-latest
    needs: [test-api, build-plugin]
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Make PR Release Folder
        run: mkdir pr-release/

      - name: Download plugin binary tgz
        uses: actions/download-artifact@v4
        with:
          name: connect-files

      - name: Copy other release files to pr-release
        run: |
          cp archive/*.txz pr-release/
          cp plugins/pr/dynamix.unraid.net.plg pr-release/dynamix.unraid.net.plg

      - name: Upload to Cloudflare
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
          AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
          AWS_REGION: "auto"
          SOURCE_DIR: pr-release
          DEST_DIR: unraid-api/tag/${{ needs.build-plugin.outputs.tag }}

      - name: Comment URL
        uses: thollander/actions-comment-pull-request@v3
        with:
          comment-tag: prlink
          mode: recreate
          message: |
            This plugin has been deployed to Cloudflare R2 and is available for testing.
            Download it at this URL:
            ```
            https://preview.dl.unraid.net/unraid-api/tag/${{ needs.build-plugin.outputs.tag }}/dynamix.unraid.net.plg
            ```

  release-staging:
    environment:
      name: staging
    # Only release if this is a push to the main branch
    if: startsWith(github.ref, 'refs/heads/main')
    runs-on: ubuntu-latest
    needs: [test-api, build-plugin]

    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Make Staging Release Folder
        run: mkdir staging-release/

      - name: Download plugin binary tgz
        uses: actions/download-artifact@v4
        with:
          name: connect-files

      - name: Copy Files for Staging Release
        run: |
          cp archive/*.txz staging-release/
          cp plugins/staging/dynamix.unraid.net.plg staging-release/dynamix.unraid.net.plg
          ls -al staging-release

      - name: Upload Staging Plugin to Cloudflare Bucket
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
          AWS_S3_BUCKET: ${{ secrets.CF_BUCKET_PREVIEW }}
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
          AWS_REGION: "auto"
          SOURCE_DIR: staging-release
          DEST_DIR: unraid-api

  create-draft-release:
    # Only run if release-please created a release
    if: needs.release-please.outputs.releases_created == 'true'
    runs-on: ubuntu-latest
    needs: [release-please, test-api, build-plugin]
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Download plugin binary tgz
        uses: actions/download-artifact@v4
        with:
          name: connect-files

      - name: Move Files to Release Folder
        run: |
          mkdir -p release/
          mv plugins/production/dynamix.unraid.net.plg release/
          mv archive/*.txz release/

      - name: Upload Release Assets
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          release_name=$(gh release list --repo ${{ github.repository }} --json name,isDraft --jq '.[] | select(.isDraft == true) | .name' | head -n 1)
          # For each file in release directory
          for file in release/*; do
            echo "Uploading $file to release..."
            gh release upload "${release_name}" "$file" --clobber
          done
51 .github/workflows/push-staging-pr-on-close.yml vendored
@@ -4,68 +4,43 @@ on:
  pull_request:
    types:
      - closed
  workflow_dispatch:
    inputs:
      pr_number:
        description: "PR number to test with"
        required: true
        type: string
      pr_merged:
        description: "Simulate merged PR"
        required: true
        type: boolean
        default: true

jobs:
  push-staging:
    if: (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || (github.event_name == 'workflow_dispatch' && inputs.pr_merged == true)
    if: github.event.pull_request.merged == true
    runs-on: ubuntu-latest
    permissions:
      contents: read
      actions: read
    steps:
      - name: Set Timezone
        uses: szenius/set-timezone@v2.0
        uses: szenius/set-timezone@v1.2
        with:
          timezoneLinux: "America/Los_Angeles"

      - name: Set PR number
        id: pr_number
        run: |
          if [ "${{ github.event_name }}" == "pull_request" ]; then
            echo "pr_number=${{ github.event.pull_request.number }}" >> $GITHUB_OUTPUT
          else
            echo "pr_number=${{ inputs.pr_number }}" >> $GITHUB_OUTPUT
          fi
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          ref: refs/pull/${{ github.event.pull_request.base.ref }}/merge

      - name: Download artifact
        uses: dawidd6/action-download-artifact@v11
        uses: actions/download-artifact@v4
        with:
          name_is_regexp: true
          name: unraid-plugin-.*
          name: connect-files
          path: connect-files
          pr: ${{ steps.pr_number.outputs.pr_number }}
          workflow_conclusion: success
          workflow_search: true
          search_artifacts: true

      - name: Update Downloaded Staging Plugin to New Date
        run: |
          # Find the .plg file in the downloaded artifact
          plgfile=$(find connect-files -name "*.plg" -type f | head -1)
          if [ ! -f "$plgfile" ]; then
            echo "ERROR: .plg file not found in connect-files/"
            ls -la connect-files/
          if [ ! -f "connect-files/plugins/dynamix.unraid.net.pr.plg" ]; then
            echo "ERROR: dynamix.unraid.net.pr.plg not found"
            exit 1
          fi

          echo "Found plugin file: $plgfile"
          plgfile="connect-files/plugins/dynamix.unraid.net.pr.plg"
          version=$(date +"%Y.%m.%d.%H%M")
          sed -i -E "s#(<!ENTITY version \").*(\">)#\1${version}\2#g" "${plgfile}" || exit 1

          # Change the plugin url to point to staging
          url="https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.plg"
          sed -i -E "s#(<!ENTITY plugin_url \").*?(\">)#\1${url}\2#g" "${plgfile}" || exit 1
          sed -i -E "s#(<!ENTITY pluginURL \").*(\">)#\1${url}\2#g" "${plgfile}" || exit 1
          cat "${plgfile}"
          mkdir -p pr-release
          mv "${plgfile}" pr-release/dynamix.unraid.net.plg
@@ -79,4 +54,4 @@ jobs:
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
          AWS_REGION: "auto"
          SOURCE_DIR: pr-release
          DEST_DIR: unraid-api/tag/PR${{ steps.pr_number.outputs.pr_number }}
          DEST_DIR: unraid-api/pr/${{ github.event.pull_request.number }}
124 .github/workflows/release-production.yml vendored
@@ -1,14 +1,11 @@
name: Publish Release
name: Publish Release to Digital Ocean

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Tag to release - will replace active release'
        required: true
  release:
    types: [published]

jobs:
  publish:
  publish-to-digital-ocean:
    runs-on: ubuntu-latest

    steps:
@@ -19,7 +16,7 @@ jobs:
          regex: true
          token: ${{ secrets.GITHUB_TOKEN }}
          target: "./"
          version: ${{ inputs.version && format('tags/{0}', inputs.version) || 'latest' }}
          version: "latest"

      - uses: cardinalby/git-get-release-action@v1
        id: release-info
@@ -28,99 +25,32 @@ jobs:
        with:
          latest: true
          prerelease: false
      - uses: actions/setup-node@v4
        with:
          node-version: '22.17.0'
      - run: |
          cat << 'EOF' > release-notes.txt
      - name: Get Release Changelog
        run: |
          notes=$(cat << EOF
          ${{ steps.release-info.outputs.body }}
          EOF
      - run: npm install html-escaper@2 xml2js
      - name: Update Plugin Changelog
        uses: actions/github-script@v7
          )
          escapedNotes=$(sed -e 's/[&\\/]/\\&/g; s/$/\\/' -e '$s/\\$//' <<<"$notes")
          sed -i -z -E "s/<CHANGES>(.*)<\/CHANGES>/<CHANGES>\n${escapedNotes}\n<\/CHANGES>/g" "dynamix.unraid.net.plg"

      - name: Upload All Release Files to DO Spaces
        uses: BetaHuhn/do-spaces-action@v2
        with:
          script: |
            const fs = require('fs');
            const { escape } = require('html-escaper');
          access_key: ${{ secrets.DO_ACCESS_KEY }}
          secret_key: ${{ secrets.DO_SECRET_KEY }}
          space_name: ${{ secrets.DO_SPACE_NAME }}
          space_region: ${{ secrets.DO_SPACE_REGION }}
          source: "."
          out_dir: unraid-api

            const releaseNotes = escape(fs.readFileSync('release-notes.txt', 'utf8'));

            if (!releaseNotes) {
              console.error('No release notes found');
              process.exit(1);
            }

            // Read the plugin file
            const pluginPath = 'dynamix.unraid.net.plg';

            if (!fs.existsSync(pluginPath)) {
              console.error('Plugin file not found:', pluginPath);
              process.exit(1);
            }

            let pluginContent = fs.readFileSync(pluginPath, 'utf8');

            // Replace the changelog section using CDATA
            pluginContent = pluginContent.replace(
              /<CHANGES>[\s\S]*?<\/CHANGES>/,
              `<CHANGES>\n${releaseNotes}\n</CHANGES>`
            );

            // Validate the plugin file is valid XML
            const xml2js = require('xml2js');
            const parser = new xml2js.Parser({
              explicitCharkey: true,
              trim: true,
              explicitRoot: true,
              explicitArray: false,
              attrkey: 'ATTR',
              charkey: 'TEXT',
              xmlnskey: 'XMLNS',
              normalizeTags: false,
              normalize: false,
              strict: false // Try with less strict parsing
            });
            parser.parseStringPromise(pluginContent).then((result) => {
              if (!result) {
                console.error('Plugin file is not valid XML');
                process.exit(1);
              }
              console.log('Plugin file is valid XML');

              // Write back to file
              fs.writeFileSync(pluginPath, pluginContent);
            }).catch((err) => {
              console.error('Plugin file is not valid XML', err);
              process.exit(1);
            });

      - name: Cleanup Inline Scripts
        run: |
          rm -rf node_modules/
      - name: Upload Release Files to DO Spaces
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.DO_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DO_SECRET_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.DO_SPACE_REGION }}
          AWS_ENDPOINT_URL: https://${{ secrets.DO_SPACE_REGION }}.digitaloceanspaces.com
        run: |
          # Upload files with explicit content encoding and public-read ACL
          aws s3 sync . s3://${{ secrets.DO_SPACE_NAME }}/unraid-api \
            --checksum-algorithm CRC32 \
            --no-guess-mime-type \
            --content-encoding none \
            --acl public-read

      - name: Upload Release Files to Cloudflare Bucket
      - name: Upload Staging Plugin to Cloudflare Bucket
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_ENDPOINT: ${{ secrets.CF_ENDPOINT }}
          AWS_S3_BUCKET: ${{ secrets.CF_BUCKET }}
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: auto
          AWS_ENDPOINT_URL: ${{ secrets.CF_ENDPOINT }}
        run: |
          # Upload files with explicit content encoding and public-read ACL
          aws s3 sync . s3://${{ secrets.CF_BUCKET }}/unraid-api \
            --checksum-algorithm CRC32 \
            --no-guess-mime-type \
            --content-encoding none \
            --acl public-read
          AWS_REGION: 'auto'
          SOURCE_DIR: "."
          DEST_DIR: unraid-api
4 .github/workflows/test-libvirt.yml vendored
@@ -28,10 +28,10 @@ jobs:

      - uses: actions/setup-python@v5
        with:
          python-version: "3.13.5"
          python-version: "3.10"

      - name: Cache APT Packages
        uses: awalsh128/cache-apt-pkgs-action@v1.5.1
        uses: awalsh128/cache-apt-pkgs-action@v1.4.3
        with:
          packages: libvirt-dev
          version: 1.0
16 .gitignore vendored
@@ -88,24 +88,10 @@ deploy/*
.nitro
.cache
.output
.env*
!.env.example

fb_keepalive

# pnpm store
.pnpm-store

# Nix
result
result-*
.direnv/
.envrc

# Webgui sync script helpers
web/scripts/.sync-webgui-repo-*

# Activation code data
plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/data/activation-data.php

# Config file that changes between versions
api/dev/Unraid.net/myservers.cfg
@@ -1,12 +0,0 @@
#!/bin/sh

if [ "$SKIP_SIMPLE_GIT_HOOKS" = "1" ]; then
  echo "[INFO] SKIP_SIMPLE_GIT_HOOKS is set to 1, skipping hook."
  exit 0
fi

if [ -f "$SIMPLE_GIT_HOOKS_RC" ]; then
  . "$SIMPLE_GIT_HOOKS_RC"
fi

pnpm lint-staged
@@ -1 +0,0 @@
1.69.1
@@ -1 +1 @@
{".":"4.10.0"}
{"api":"4.1.2","web":"4.1.2","unraid-ui":"4.1.2","plugin":"4.1.2"}
27 .vscode/settings.json vendored
@@ -1,14 +1,15 @@
{
  "files.associations": {
    "*.page": "php"
  },
  "editor.codeActionsOnSave": {
    "source.fixAll": "never",
    "source.fixAll.eslint": "explicit"
  },
  "i18n-ally.localesPaths": ["locales"],
  "i18n-ally.keystyle": "flat",
  "eslint.experimental.useFlatConfig": true,
  "typescript.preferences.importModuleSpecifier": "non-relative",
  "javascript.preferences.importModuleSpecifier": "non-relative"
}
  "files.associations": {
    "*.page": "php"
  },
  "editor.codeActionsOnSave": {
    "source.fixAll": "never",
    "source.fixAll.eslint": "explicit"
  },
  "i18n-ally.localesPaths": [
    "locales"
  ],
  "i18n-ally.keystyle": "flat",
  "eslint.experimental.useFlatConfig": true
}
137 CLAUDE.md
@@ -1,137 +0,0 @@
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Project Overview

This is the Unraid API monorepo containing multiple packages that provide API functionality for Unraid servers. It uses pnpm workspaces with the following structure:

- `/api` - Core NestJS API server with GraphQL
- `/web` - Nuxt.js frontend application
- `/unraid-ui` - Vue 3 component library
- `/plugin` - Unraid plugin package (.plg)
- `/packages` - Shared packages and API plugins

## Essential Commands

### Development

```bash
pnpm install # Install all dependencies
pnpm dev # Run all dev servers concurrently
pnpm build # Build all packages
pnpm build:watch # Watch mode with local plugin build
```

### Testing & Code Quality

```bash
pnpm test # Run all tests
pnpm lint # Run linting
pnpm lint:fix # Fix linting issues
pnpm type-check # TypeScript type checking
```

### API Development

```bash
cd api && pnpm dev # Run API server (http://localhost:3001)
cd api && pnpm test:watch # Run tests in watch mode
cd api && pnpm codegen # Generate GraphQL types
```

### Deployment

```bash
pnpm unraid:deploy <SERVER_IP> # Deploy all to Unraid server
```

## Architecture Notes

### API Structure (NestJS)

- Modules: `auth`, `config`, `plugins`, `emhttp`, `monitoring`
- GraphQL API with Apollo Server at `/graphql`
- Redux store for state management in `src/store/`
- Plugin system for extending functionality
- Entry points: `src/index.ts` (server), `src/cli.ts` (CLI)

### Key Patterns

- TypeScript imports use `.js` extensions (ESM compatibility)
- NestJS dependency injection with decorators
- GraphQL schema-first approach with code generation
- API plugins follow specific structure (see `api/docs/developer/api-plugins.md`)

### Authentication

- API key authentication via headers
- Cookie-based session management
- Keys stored in `/boot/config/plugins/unraid-api/`

### Development Workflow

1. Work Intent required before starting development
2. Fork from `main` branch
3. Reference Work Intent in PR
4. No direct pushes to main

### Debug Mode

```bash
LOG_LEVEL=debug unraid-api start --debug
```

Enables GraphQL playground at `http://tower.local/graphql`

## Coding Guidelines

### General Rules

- Never add comments unless they are needed for clarity of function
- Never add comments for obvious things, and avoid commenting when starting and ending code blocks
- Be CONCISE, keep replies shorter than a paragraph if at all possible

### API Development Rules (`api/**/*`)

- Use pnpm ONLY for package management
- Always run scripts from api/package.json unless requested
- Prefer adding new files to the NestJS repo located at `api/src/unraid-api/` instead of the legacy code
- Test suite is VITEST, do not use jest
- Run tests with: `pnpm --filter ./api test`
- Prefer to not mock simple dependencies

### Web Development Rules (`web/**/*`)

- Always run `pnpm codegen` for GraphQL code generation in the web directory
- GraphQL queries must be placed in `.query.ts` files
- GraphQL mutations must be placed in `.mutation.ts` files
- All GraphQL under `web/` must follow this naming convention

### Testing Guidelines

#### Vue Component Testing

- This is a Nuxt.js app but we are testing with vitest outside of the Nuxt environment
- Nuxt is currently set to auto import so some vue files may need compute or ref imported
- Use pnpm when running terminal commands and stay within the web directory
- Tests are located under `web/__test__`, run with `pnpm test`
- Use `mount` from Vue Test Utils for component testing
- Stub complex child components that aren't the focus of the test
- Mock external dependencies and services
- Test component behavior and output, not implementation details
- Use `createTestingPinia()` for mocking stores in components
- Find elements with semantic queries like `find('button')` rather than data-test IDs
- Use `await nextTick()` for DOM updates
- Always await async operations before making assertions

#### Store Testing with Pinia

- Use `createPinia()` and `setActivePinia` when testing Store files
- Only use `createTestingPinia` if you specifically need its testing features
- Let stores initialize with their natural default state
- Don't mock the store being tested
- Ensure Vue reactivity imports are added to store files (computed, ref, watchEffect)
- Place all mock declarations at the top level
- Use factory functions for module mocks to avoid hoisting issues
- Clear mocks between tests to ensure isolation
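The Pinia store-testing guidelines above are easier to apply with a concrete example in hand. The following is a minimal sketch, not code from this repository: it assumes a hypothetical `useCounterStore` with a `count` value and an `increment` action, and shows the `createPinia()` / `setActivePinia` pattern the guidelines recommend:

```typescript
import { createPinia, setActivePinia } from 'pinia';
import { beforeEach, describe, expect, it } from 'vitest';

import { useCounterStore } from '@/stores/counter'; // hypothetical store

describe('useCounterStore', () => {
  beforeEach(() => {
    // A fresh Pinia per test keeps state from leaking between cases
    setActivePinia(createPinia());
  });

  it('initializes with its natural default state', () => {
    const store = useCounterStore();
    expect(store.count).toBe(0);
  });

  it('updates state through its own actions (the store itself is not mocked)', () => {
    const store = useCounterStore();
    store.increment();
    expect(store.count).toBe(1);
  });
});
```

Note the contrast with component tests: here a real Pinia instance exercises the store's actual logic, whereas `createTestingPinia()` is reserved for components that merely consume a store.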
@@ -7,17 +7,6 @@ Thank you for your interest in contributing to Unraid Connect! We want to make c
- Submitting a fix
- Proposing new features

## TypeScript Import Extensions in the API Directory

When working with the API directory, you'll notice that TypeScript files are imported with `.js` extensions (e.g., `import { something } from './file.js'`) even though the actual files have `.ts` extensions. This is because:

1. We use ECMAScript modules (ESM) in our TypeScript configuration
2. When TypeScript compiles `.ts` files to `.js`, the import paths in the compiled code need to reference `.js` files
3. TypeScript doesn't automatically change the extensions in import statements during compilation
4. Using `.js` extensions in imports ensures that both TypeScript during development and Node.js in production can resolve the modules correctly

This approach follows the [official TypeScript ESM recommendation](https://www.typescriptlang.org/docs/handbook/esm-node.html) and ensures compatibility across development and production environments.
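To make the four points above concrete, here is a minimal sketch (the file names are hypothetical, not from this repository):

```typescript
// src/math.ts
export function add(a: number, b: number): number {
  return a + b;
}

// src/index.ts — the specifier uses .js even though the source file is math.ts
import { add } from './math.js';

console.log(add(2, 3)); // 5
```

After `tsc` emits `src/math.js` and `src/index.js`, the `./math.js` specifier resolves unchanged in Node.js, which is why the extension is written as `.js` in the source.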

## Development Process

We use GitHub to host code, to track issues and feature requests, as well as accept pull requests.
@@ -58,14 +47,6 @@ We use GitHub to host code, to track issues and feature requests, as well as acc

**Note:** Direct pushes to the main branch are not allowed. All changes must go through the PR process.

## Developer Documentation

For detailed information about development workflows, repository organization, and other technical details, please refer to our developer documentation:

- [Development Guide](api/docs/developer/development.md) - Setup, building, and debugging instructions
- [Development Workflows](api/docs/developer/workflows.md) - Detailed workflows for local development, building, and deployment
- [Repository Organization](api/docs/developer/repo-organization.md) - High-level architecture and project structure

## Bug Reports and Feature Requests

We use GitHub issues to track bugs and feature requests:
352 LICENSE.txt
@@ -1,352 +0,0 @@
Project License Notice
----------------------

This project is licensed under the terms of the GNU General Public License version 2,
**or (at your option) any later version** published by the Free Software Foundation.

The full text of the GNU GPL v2.0 is provided below for reference.

----------------------

GNU GENERAL PUBLIC LICENSE
Version 2, June 1991

Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
<https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Preamble

The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.

To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.

For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.

We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.

Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.

Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.

The precise terms and conditions for copying, distribution and
modification follow.

GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".

Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.

1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.

You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.

2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:

a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.

b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.

c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)

These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.

Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.

In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.

3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:

a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,

b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,

c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)

The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.

If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.

4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.

5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.

6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.

7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.

If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.

It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.

This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.

8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.

9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.

10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.

NO WARRANTY

11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.

12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

Unraid API - Core API functionality for Unraid systems
Copyright (C) 2024 Lime Technology, Inc.

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License along
with this program; if not, see <https://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

For questions about licensing or to report issues:
- Website: https://unraid.net
- Email: support@unraid.net

If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:

Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.

You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:

Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.

<signature of Moe Ghoul>, 1 April 1989
Moe Ghoul, President of Vice

This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.
@@ -9,12 +9,6 @@ PATHS_MY_SERVERS_CONFIG=./dev/Unraid.net/myservers.cfg # My servers config file
PATHS_MY_SERVERS_FB=./dev/Unraid.net/fb_keepalive # My servers flashbackup timekeeper file
PATHS_KEYFILE_BASE=./dev/Unraid.net # Keyfile location
PATHS_MACHINE_ID=./dev/data/machine-id
PATHS_PARITY_CHECKS=./dev/states/parity-checks.log
PATHS_CONFIG_MODULES=./dev/configs
PATHS_ACTIVATION_BASE=./dev/activation
PATHS_PASSWD=./dev/passwd
PATHS_RCLONE_SOCKET=./dev/rclone-socket
PATHS_LOG_BASE=./dev/log # Where we store logs
ENVIRONMENT="development"
NODE_ENV="development"
PORT="3001"
@@ -26,4 +20,4 @@ BYPASS_PERMISSION_CHECKS=false
BYPASS_CORS_CHECKS=true
CHOKIDAR_USEPOLLING=true
LOG_TRANSPORT=console
LOG_LEVEL=trace
LOG_LEVEL=trace
@@ -1,5 +0,0 @@
ENVIRONMENT="production"
NODE_ENV="production"
PORT="/var/run/unraid-api.sock"
MOTHERSHIP_GRAPHQL_LINK="https://mothership.unraid.net/ws"
PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
@@ -1,5 +0,0 @@
ENVIRONMENT="staging"
NODE_ENV="production"
PORT="/var/run/unraid-api.sock"
MOTHERSHIP_GRAPHQL_LINK="https://staging.mothership.unraid.net/ws"
PATHS_CONFIG_MODULES="/boot/config/plugins/dynamix.my.servers/configs"
@@ -9,9 +9,5 @@ PATHS_MY_SERVERS_CONFIG=./dev/Unraid.net/myservers.cfg # My servers config file
PATHS_MY_SERVERS_FB=./dev/Unraid.net/fb_keepalive # My servers flashbackup timekeeper file
PATHS_KEYFILE_BASE=./dev/Unraid.net # Keyfile location
PATHS_MACHINE_ID=./dev/data/machine-id
PATHS_PARITY_CHECKS=./dev/states/parity-checks.log
PATHS_CONFIG_MODULES=./dev/configs
PATHS_ACTIVATION_BASE=./dev/activation
PATHS_PASSWD=./dev/passwd
PORT=5000
NODE_ENV="test"
NODE_ENV="test"
@@ -1,5 +1,4 @@
import eslint from '@eslint/js';
import importPlugin from 'eslint-plugin-import';
import noRelativeImportPaths from 'eslint-plugin-no-relative-import-paths';
import prettier from 'eslint-plugin-prettier';
import tseslint from 'typescript-eslint';
@@ -8,7 +7,6 @@ export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.r
    plugins: {
        'no-relative-import-paths': noRelativeImportPaths,
        prettier: prettier,
        import: importPlugin,
    },
    rules: {
        '@typescript-eslint/no-redundant-type-constituents': 'off',
@@ -24,6 +22,7 @@ export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.r
        '@typescript-eslint/no-unused-vars': 'off',
        '@typescript-eslint/no-unused-expressions': 'off',
        'import/no-unresolved': 'off',
        'import/extensions': 'off',
        'import/no-absolute-path': 'off',
        'import/prefer-default-export': 'off',
        'no-relative-import-paths/no-relative-import-paths': [
@@ -31,27 +30,6 @@ export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.r
            { allowSameFolder: false, rootDir: 'src', prefix: '@app' },
        ],
        'prettier/prettier': 'error',
        'import/extensions': [
            'error',
            'ignorePackages',
            {
                js: 'always',
                ts: 'always',
            },
        ],
        'no-restricted-globals': [
            'error',
            {
                name: '__dirname',
                message: 'Use import.meta.url instead of __dirname in ESM',
            },
            {
                name: '__filename',
                message: 'Use import.meta.url instead of __filename in ESM',
            },
        ],
        'eol-last': ['error', 'always'],
    },

    ignores: ['src/graphql/generated/client/**/*'],
});
@@ -1,5 +0,0 @@
{
  schema: {
    files: 'src/graphql/schema/types/**/*.graphql'
  }
}
11 api/.vscode/extensions.json vendored
@@ -1,11 +0,0 @@
{
  "recommendations": [
    "mikestead.dotenv",
    "eamodio.gitlens",
    "dbaeumer.vscode-eslint",
    "antfu.goto-alias",
    "bierner.markdown-mermaid",
    "github.vscode-pull-request-github",
    "bierner.markdown-preview-github-styles"
  ]
}
4 api/.vscode/settings.json vendored
@@ -3,7 +3,5 @@
  "eslint.options": {
    "flags": ["unstable_ts_config"],
    "overrideConfigFile": ".eslintrc.ts"
  },
  "typescript.preferences.importModuleSpecifier": "non-relative",
  "javascript.preferences.importModuleSpecifier": "non-relative"
}
}
1046 api/CHANGELOG.md
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
###########################################################
# Development/Build Image
###########################################################
FROM node:22.17.0-bookworm-slim AS development
FROM node:20-bookworm-slim AS development

# Install build tools and dependencies
RUN apt-get update -y && apt-get install -y \
@@ -42,4 +42,4 @@ ENV NODE_ENV=production

COPY . .

CMD ["pnpm", "run", "build:release"]
CMD ["pnpm", "run", "build-and-pack"]
159 api/codegen.ts
@@ -1,68 +1,105 @@
import type { CodegenConfig } from '@graphql-codegen/cli';

const config: CodegenConfig = {
    overwrite: true,
    emitLegacyCommonJSImports: false,
    verbose: true,
    config: {
        namingConvention: {
            enumValues: 'change-case-all#upperCase',
            transformUnderscore: true,
            useTypeImports: true,
        },
        scalars: {
            DateTime: 'string',
            Long: 'number',
            JSON: 'Record<string, any>',
            URL: 'URL',
            Port: 'number',
            UUID: 'string',
        },
        scalarSchemas: {
            URL: 'z.instanceof(URL)',
            Long: 'z.number()',
            JSON: 'z.record(z.string(), z.any())',
            Port: 'z.number()',
            UUID: 'z.string()',
        },
    overwrite: true,
    emitLegacyCommonJSImports: false,
    verbose: true,
    config: {
        namingConvention: {
            typeNames: './fix-array-type.cjs',
            enumValues: 'change-case#upperCase',
            useTypeImports: true,
        },
    generates: {
        // Generate Types for Mothership GraphQL Client
        'src/graphql/generated/client/': {
            documents: './src/graphql/mothership/*.ts',
            schema: {
                [process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
                    headers: {
                        origin: 'https://forums.unraid.net',
                    },
                },
            },
            preset: 'client',
            presetConfig: {
                gqlTagName: 'graphql',
            },
            config: {
                useTypeImports: true,
                withObjectType: true,
            },
            plugins: [{ add: { content: '/* eslint-disable */' } }],
        },
        'src/graphql/generated/client/validators.ts': {
            schema: {
                [process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
                    headers: {
                        origin: 'https://forums.unraid.net',
                    },
                },
            },
            plugins: ['typescript-validation-schema', { add: { content: '/* eslint-disable */' } }],
            config: {
                importFrom: '@app/graphql/generated/client/graphql.js',
                strictScalars: false,
                schema: 'zod',
            },
        },
        scalars: {
            DateTime: 'string',
            Long: 'number',
            JSON: '{ [key: string]: any }',
            URL: 'URL',
            Port: 'number',
            UUID: 'string',
        },
    },
    generates: {
        'src/graphql/generated/client/': {
            documents: './src/graphql/mothership/*.ts',
            schema: {
                [process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
                    headers: {
                        origin: 'https://forums.unraid.net',
                    },
                },
            },
            preset: 'client',
            presetConfig: {
                gqlTagName: 'graphql',
            },
            config: {
                useTypeImports: true,
                withObjectType: true,
            },
            plugins: [
                { add: { content: '/* eslint-disable */' } },
            ],
        },
        // Generate Types for the API Server
        'src/graphql/generated/api/types.ts': {
            schema: [
                './src/graphql/types.ts',
                './src/graphql/schema/types/**/*.graphql',
            ],
            plugins: [
                'typescript',
                'typescript-resolvers',
                { add: { content: '/* eslint-disable */' } },
            ],
            config: {
                contextType: '@app/graphql/schema/utils#Context',
                useIndexSignature: true,
            },
        },
        // Generate Operations for any built-in API Server Operations (e.g., report.ts)
        'src/graphql/generated/api/operations.ts': {
            documents: './src/graphql/client/api/*.ts',
            schema: [
                './src/graphql/types.ts',
                './src/graphql/schema/types/**/*.graphql',
            ],
            preset: 'import-types',
            presetConfig: {
                typesPath: '@app/graphql/generated/api/types',
            },
            plugins: [
                'typescript-validation-schema',
                'typescript-operations',
                'typed-document-node',
                { add: { content: '/* eslint-disable */' } },
            ],
            config: {
                importFrom: '@app/graphql/generated/api/types',
                strictScalars: false,
                schema: 'zod',
                withObjectType: true,
            },
        },
        'src/graphql/generated/client/validators.ts': {
            schema: {
                [process.env.MOTHERSHIP_GRAPHQL_LINK as string]: {
                    headers: {
                        origin: 'https://forums.unraid.net',
                    },
                },
            },
            plugins: [
                'typescript-validation-schema',
                { add: { content: '/* eslint-disable */' } },
            ],
            config: {
                importFrom: '@app/graphql/generated/client/graphql',
                strictScalars: false,
                schema: 'zod',
            },
        },
    },
};

export default config;
export default config;
@@ -1 +0,0 @@
module.exports = { extends: ['@commitlint/config-conventional'] };
@@ -1 +0,0 @@
Binary file not shown.
@@ -1,5 +1,5 @@
[api]
version="4.4.1"
version="4.0.1"
extraOrigins="https://google.com,https://test.com"
[local]
sandbox="yes"
@@ -1,20 +0,0 @@
[api]
version="4.4.1"
extraOrigins="https://google.com,https://test.com"
[local]
sandbox="yes"
[remote]
wanaccess="yes"
wanport="8443"
upnpEnabled="no"
apikey="_______________________BIG_API_KEY_HERE_________________________"
localApiKey="_______________________LOCAL_API_KEY_HERE_________________________"
email="test@example.com"
username="zspearmint"
avatar="https://via.placeholder.com/200"
regWizTime="1611175408732_0951-1653-3509-FBA155FA23C0"
accesstoken=""
idtoken=""
refreshtoken=""
dynamicRemoteAccessType="DISABLED"
ssoSubIds=""
@@ -1,13 +0,0 @@
{
  "code": "EXAMPLE_CODE_123",
  "partnerName": "MyPartner Inc.",
  "partnerUrl": "https://partner.example.com",
  "serverName": "MyAwesomeServer",
  "sysModel": "CustomBuild v1.0",
  "comment": "This is a test activation code for development.",
  "header": "#336699",
  "headermetacolor": "#FFFFFF",
  "background": "#F0F0F0",
  "showBannerGradient": "yes",
  "theme": "black"
}
@@ -1 +0,0 @@
true
Binary file not shown (before: 7.2 KiB image).
@@ -1,19 +0,0 @@
<?xml version="1.0" encoding="utf-8" ?>
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="442" height="221">
<defs>
<linearGradient id="gradient_0" gradientUnits="userSpaceOnUse" x1="608.84924" y1="48.058002" x2="447.47684" y2="388.15295">
<stop offset="0" stop-color="#ECC02F"/>
<stop offset="1" stop-color="#B8436B"/>
</linearGradient>
</defs>
<path fill="url(#gradient_0)" transform="scale(0.431641 0.431641)" d="M126.543 236.139C141.269 184.983 170.747 148.08 228.938 144.823C240.378 144.182 259.66 144.749 271.333 145.215C299.585 144.391 350.558 142.667 377.842 145.685C414.099 149.696 443.185 175.429 472.192 195.251L586.561 274.337C636.114 308.874 627.234 309.151 685.21 309.042L778.304 309.082C799.091 309.099 813.482 308.867 828.82 292.529C857.893 261.561 843.003 209.317 800.506 200.17C790.505 198.018 779.334 199.535 769.11 199.523L702.658 199.488C690.005 186.062 675.199 151.817 658.182 145.215L739.199 145.198C765.636 145.196 796.164 142.886 821.565 150.344C923.889 180.389 922.324 331.136 816.611 357.807C802.524 361.361 788.425 361.034 774.035 361.031L663.497 361.009C623.773 360.859 603.599 349.313 572.35 327.596L430.421 229.848C415.731 219.804 401.419 209.118 386.451 199.488C377.579 199.501 368.42 200.01 359.582 199.488L272.561 199.497C258.582 199.485 235.352 198.06 222.607 200.981C192.741 207.825 177.956 234.361 180.015 263.294C177.545 260.392 178.63 254.678 178.838 251.164C179.877 233.569 187.409 224.968 197.345 212.22C184.786 202.853 156.933 193.749 149.447 186.645C143.454 196.583 136.881 205.628 132.955 216.732C130.766 222.921 130.678 230.967 127.506 236.625L126.543 236.139Z"/>
<path fill="#308DAF" transform="scale(0.431641 0.431641)" d="M149.447 186.645C156.933 193.749 184.786 202.853 197.345 212.22C187.409 224.968 179.877 233.569 178.838 251.164C178.63 254.678 177.545 260.392 180.015 263.294C192.489 309.751 221.563 309.078 263.512 309.07L322.096 309.048C333.708 325.984 348.958 344.904 361.795 361.006L232.654 361.03C176.801 360.579 130.605 315.939 126.498 260.613C125.893 252.473 126.453 244.293 126.543 236.139L127.506 236.625C130.678 230.967 130.766 222.921 132.955 216.732C136.881 205.628 143.454 196.583 149.447 186.645Z"/>
<defs>
<linearGradient id="gradient_1" gradientUnits="userSpaceOnUse" x1="620.42566" y1="140.57172" x2="611.08759" y2="282.2207">
<stop offset="0" stop-color="#F5A22C"/>
<stop offset="1" stop-color="#E17543"/>
</linearGradient>
</defs>
<path fill="url(#gradient_1)" transform="scale(0.431641 0.431641)" d="M570.215 137.504C646.214 133.055 670.623 188.789 707.064 241.977L726.71 270.658C729.065 274.1 737.591 284.13 737.576 287.916L674.645 287.916C674.5 287.132 659.251 264.134 658.182 263.294C658.133 262.92 623.915 212.832 620.593 208.697C602.652 186.369 565.856 181.796 545.393 203.424C542.002 207.007 539.705 211.779 535.713 214.764C534.409 212.586 496.093 187.105 490.641 183.32C508.306 154.99 539.004 142.872 570.215 137.504Z"/>
<path fill="#308DAF" transform="scale(0.431641 0.431641)" d="M286.656 221.485L350.512 221.485C354.248 227.374 358.556 232.986 362.565 238.698L379.9 263.82C397.44 289.065 410.994 321.185 447.698 317.317C464.599 315.536 476.472 305.449 486.751 292.741C494.293 298.818 530.089 320.341 533.124 324.28C532.441 328.231 526.229 334.319 522.861 336.255C521.587 339.958 509.164 348.519 505.635 350.88C463.781 378.879 411.472 377.537 373.808 343.464C365.331 335.795 359.734 326.969 353.351 317.641L336.798 293.614C320.035 269.591 302.915 245.863 286.656 221.485Z"/>
</svg>
(before: 3.4 KiB)
@@ -1,12 +0,0 @@
{
  "version": "4.9.5",
  "extraOrigins": [
    "https://google.com",
    "https://test.com"
  ],
  "sandbox": true,
  "ssoSubIds": [],
  "plugins": [
    "unraid-api-plugin-connect"
  ]
}
@@ -1,16 +0,0 @@
{
  "wanaccess": false,
  "wanport": 0,
  "upnpEnabled": false,
  "apikey": "",
  "localApiKey": "",
  "email": "",
  "username": "",
  "avatar": "",
  "regWizTime": "",
  "accesstoken": "",
  "idtoken": "",
  "refreshtoken": "",
  "dynamicRemoteAccessType": "DISABLED",
  "ssoSubIds": []
}
@@ -1 +0,0 @@
d0b5433294c110f1eed72bdb63910a9a
@@ -1 +0,0 @@
version="6.12.0-beta5"
Binary file not shown (before: 61 KiB image).
@@ -1 +0,0 @@
case-model.png
Binary file not shown (before: 4.5 KiB image).
@@ -1,42 +1,36 @@
[display]
date=%c
time=%I:%M %p
number=.,
scale=-1
tabs=1
users=Tasks:3
resize=0
wwn=0
total=1
usage=0
banner=image
dashapps=icons
theme=black
text=1
unit=C
warning=70
critical=90
hot=45
max=55
sysinfo=/Tools/SystemProfiler
header=336699
headermetacolor=FFFFFF
background=F0F0F0
showBannerGradient=yes

date="%c"
time="%I:%M %p"
number=".,"
scale="-1"
tabs="1"
users="Tasks:3"
resize="0"
wwn="0"
total="1"
usage="0"
banner="image"
dashapps="icons"
theme="white"
text="1"
unit="C"
warning="70"
critical="90"
hot="45"
max="55"
sysinfo="/Tools/SystemProfiler"
[notify]
entity=1
normal=1
warning=1
alert=1
unraid=1
plugin=1
docker_notify=1
report=1
display=0
date=d-m-Y
time=H:i
position=top-right
path=./dev/notifications
system=*/1 * * * *

entity="1"
normal="1"
warning="1"
alert="1"
unraid="1"
plugin="1"
docker_notify="1"
report="1"
display="0"
date="d-m-Y"
time="H:i"
position="top-right"
path="/app/dev/notifications"
system="*/1 * * * *"
@@ -1,36 +0,0 @@
# Generated settings:
NAME="Unraid"
timeZone="America/New_York"
COMMENT="Media server"
SECURITY="user"
WORKGROUP="WORKGROUP"
DOMAIN=""
DOMAIN_SHORT=""
hideDotFiles="no"
enableFruit="yes"
USE_NETBIOS="no"
localMaster="yes"
serverMultiChannel="no"
USE_WSD="yes"
WSD_OPT=""
WSD2_OPT=""
USE_NTP="yes"
NTP_SERVER1="time1.google.com"
NTP_SERVER2="time2.google.com"
NTP_SERVER3="time3.google.com"
NTP_SERVER4="time4.google.com"
DOMAIN_LOGIN="Administrator"
DOMAIN_PASSWD=""
SYS_MODEL="Custom"
SYS_ARRAY_SLOTS="24"
USE_SSL="yes"
PORT="80"
PORTSSL="8443"
LOCAL_TLD="local"
BIND_MGT="no"
USE_TELNET="no"
PORTTELNET="23"
USE_SSH="yes"
PORTSSH="22"
USE_UPNP="yes"
START_PAGE="Main"
@@ -6,6 +6,6 @@
  "name": "Connect",
  "permissions": [],
  "roles": [
    "CONNECT"
    "connect"
  ]
}
@@ -1,5 +1,5 @@
[api]
version="4.4.1"
version="4.0.1"
extraOrigins="https://google.com,https://test.com"
[local]
sandbox="yes"
@@ -1,30 +0,0 @@
[eth0]
DHCP_KEEPRESOLV="no"
DNS_SERVER1="1.1.1.1"
DNS_SERVER2="8.8.8.8"
DHCP6_KEEPRESOLV="no"
BONDING="yes"
BONDNAME=""
BONDNICS="eth0,eth1,eth2,eth3"
BONDING_MODE="1"
BONDING_MIIMON="100"
BRIDGING="yes"
BRNAME=""
BRNICS="bond0"
BRSTP="0"
BRFD="0"
DESCRIPTION:0=""
PROTOCOL:0=""
USE_DHCP:0="yes"
IPADDR:0="192.168.1.150"
NETMASK:0="255.255.255.0"
GATEWAY:0="192.168.1.1"
METRIC:0=""
USE_DHCP6:0=""
IPADDR6:0=""
NETMASK6:0=""
GATEWAY6:0=""
METRIC6:0=""
PRIVACY6:0=""
MTU=""
TYPE="access"
@@ -1,190 +0,0 @@
["disk1"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk2"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk3"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk4"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk5"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk6"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk7"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk8"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk9"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk10"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk11"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk12"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk13"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk14"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk15"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk16"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk17"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk18"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk19"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk20"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk21"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["disk22"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["abc"]
export="e"
fruit="no"
caseSensitive="auto"
security="public"
readList=""
writeList=""
volsizelimit=""
["flash"]
export="e"
fruit="no"
security="public"
readList=""
writeList=""
@@ -1,92 +0,0 @@
["disk1"]
export="-"
security="public"
hostList=""
["disk2"]
export="-"
security="public"
hostList=""
["disk3"]
export="-"
security="public"
hostList=""
["disk4"]
export="-"
security="public"
hostList=""
["disk5"]
export="-"
security="public"
hostList=""
["disk6"]
export="-"
security="public"
hostList=""
["disk7"]
export="-"
security="public"
hostList=""
["disk8"]
export="-"
security="public"
hostList=""
["disk9"]
export="-"
security="public"
hostList=""
["disk10"]
export="-"
security="public"
hostList=""
["disk11"]
export="-"
security="public"
hostList=""
["disk12"]
export="-"
security="public"
hostList=""
["disk13"]
export="-"
security="public"
hostList=""
["disk14"]
export="-"
security="public"
hostList=""
["disk15"]
export="-"
security="public"
hostList=""
["disk16"]
export="-"
security="public"
hostList=""
["disk17"]
export="-"
security="public"
hostList=""
["disk18"]
export="-"
security="public"
hostList=""
["disk19"]
export="-"
security="public"
hostList=""
["disk20"]
export="-"
security="public"
hostList=""
["disk21"]
export="-"
security="public"
hostList=""
["disk22"]
export="-"
security="public"
hostList=""
["abc"]
export="-"
security="public"
hostList=""
@@ -1,68 +0,0 @@
["appdata"]
name="appdata"
nameOrig="appdata"
comment=""
allocator="highwater"
splitLevel=""
floor="0"
include=""
exclude=""
useCache="no"
cachePool="cache"
cow="auto"
color="yellow-on"
size="0"
free="9091184"
used="32831348"
luksStatus="0"
["domains"]
name="domains"
nameOrig="domains"
comment="saved VM instances"
allocator="highwater"
splitLevel="1"
floor="0"
include=""
exclude=""
useCache="prefer"
cachePool="cache"
cow="auto"
color="yellow-on"
size="0"
free="9091184"
used="32831348"
luksStatus="0"
["isos"]
name="isos"
nameOrig="isos"
comment="ISO images"
allocator="highwater"
splitLevel=""
floor="0"
include=""
exclude=""
useCache="yes"
cachePool="cache"
cow="auto"
color="yellow-on"
size="0"
free="9091184"
used="32831348"
luksStatus="0"
["system"]
name="system"
nameOrig="system"
comment="system data"
allocator="highwater"
splitLevel="1"
floor="0"
include=""
exclude=""
useCache="prefer"
cachePool="cache"
cow="auto"
color="yellow-on"
size="0"
free="9091184"
used="32831348"
luksStatus="0"
@@ -1,15 +0,0 @@
["root"]
idx="0"
name="root"
desc="Console and webGui login account"
passwd="yes"
["xo"]
idx="1"
name="xo"
desc=""
passwd="yes"
["test_user"]
idx="2"
name="test_user"
desc=""
passwd="no"
@@ -87,7 +87,7 @@ shareAvahiSMBModel="Xserve"
shfs_logging="1"
safeMode="no"
startMode="Normal"
configValid="ineligible"
configValid="yes"
joinStatus="Not joined"
deviceCount="4"
flashGUID="0000-0000-0000-000000000000"
@@ -102,7 +102,6 @@ regTm="1833409182"
regTm2="0"
regExp=""
regGen="0"
regState="ENOKEYFILE"
sbName="/boot/config/super.dat"
sbVersion="2.9.13"
sbUpdated="1596079143"
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 222.36 39.04"><defs><linearGradient id="header-logo" x1="47.53" y1="79.1" x2="170.71" y2="-44.08" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#e32929"/><stop offset="1" stop-color="#ff8d30"/></linearGradient></defs><title>unraid.net</title><path d="M146.7,29.47H135l-3,9h-6.49L138.93,0h8l13.41,38.49h-7.09L142.62,6.93l-5.83,16.88h8ZM29.69,0V25.4c0,8.91-5.77,13.64-14.9,13.64S0,34.31,0,25.4V0H6.54V25.4c0,5.17,3.19,7.92,8.25,7.92s8.36-2.75,8.36-7.92V0ZM50.86,12v26.5H44.31V0h6.11l17,26.5V0H74V38.49H67.9ZM171.29,0h6.54V38.49h-6.54Zm51.07,24.69c0,9-5.88,13.8-15.17,13.8H192.67V0H207.3c9.18,0,15.06,4.78,15.06,13.8ZM215.82,13.8c0-5.28-3.3-8.14-8.52-8.14h-8.08V32.77h8c5.33,0,8.63-2.8,8.63-8.08ZM108.31,23.92c4.34-1.6,6.93-5.28,6.93-11.55C115.24,3.68,110.18,0,102.48,0H88.84V38.49h6.55V5.66h6.87c3.8,0,6.21,1.82,6.21,6.71s-2.41,6.76-6.21,6.76H98.88l9.21,19.36h7.53Z" fill="url(#header-logo)"/></svg>
(deleted image; size before deletion: 1008 B)
@@ -1,124 +0,0 @@
# Working with API plugins

Under the hood, API plugins (i.e. plugins to the `@unraid/api` project) are represented as npm `peerDependencies`. This is npm's intended plugin mechanism, and because peer dependencies are installed by default as of npm v7, the relationship is bi-directional: the API provides dependencies for the plugin, while the plugin provides functionality to the API.
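For illustration, consuming such a plugin at runtime can be as simple as an optional dynamic import. This is a minimal sketch, not the API's actual loader, and `unraid-api-plugin-example` is a hypothetical package name:

```typescript
// Minimal sketch: resolve an optional plugin peer dependency at runtime.
// 'unraid-api-plugin-example' is a hypothetical package name for illustration.
async function loadOptionalPlugin(name: string): Promise<unknown | null> {
    try {
        // Resolves only when the peer dependency is actually installed.
        return await import(name);
    } catch {
        // Plugin is absent; the API keeps running without it.
        return null;
    }
}

const plugin = await loadOptionalPlugin('unraid-api-plugin-example');
```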
## Private Workspace plugins

### Adding a local workspace package as an API plugin

The challenge with local workspace plugins is that they aren't available via npm during production. To solve this, we vendor them during the build process. Here's the complete process:

#### 1. Configure the build system

Add your workspace package to the vendoring configuration in `api/scripts/build.ts`:

```typescript
const WORKSPACE_PACKAGES_TO_VENDOR = {
    '@unraid/shared': 'packages/unraid-shared',
    'unraid-api-plugin-connect': 'packages/unraid-api-plugin-connect',
    'your-plugin-name': 'packages/your-plugin-path', // Add your plugin here
} as const;
```

#### 2. Configure Vite

Add your workspace package to the Vite configuration in `api/vite.config.ts`:

```typescript
const workspaceDependencies = {
    '@unraid/shared': 'packages/unraid-shared',
    'unraid-api-plugin-connect': 'packages/unraid-api-plugin-connect',
    'your-plugin-name': 'packages/your-plugin-path', // Add your plugin here
};
```

This ensures the package is:

- Excluded from Vite's optimization during development
- Marked as external during the build process
- Properly handled in SSR mode

#### 3. Configure the API package.json

Add your workspace package as a peer dependency in `api/package.json`:

```json
{
    "peerDependencies": {
        "unraid-api-plugin-connect": "workspace:*",
        "your-plugin-name": "workspace:*"
    },
    "peerDependenciesMeta": {
        "unraid-api-plugin-connect": {
            "optional": true
        },
        "your-plugin-name": {
            "optional": true
        }
    }
}
```

By marking the workspace dependency "optional", npm will not attempt to install it during development. The "workspace:*" identifier is invalid at build time and run time, but that causes no problems because the package gets vendored instead.

#### 4. Plugin package setup

Your workspace plugin package should:

1. **Export types and main entry**: Set up proper `main`, `types`, and `exports` fields:

```json
{
    "name": "your-plugin-name",
    "main": "dist/index.js",
    "types": "dist/index.d.ts",
    "type": "module",
    "exports": {
        ".": {
            "types": "./dist/index.d.ts",
            "import": "./dist/index.js"
        }
    },
    "files": ["dist"]
}
```

2. **Use peer dependencies**: Declare shared dependencies as peer dependencies to avoid duplication:

```json
{
    "peerDependencies": {
        "@nestjs/common": "^11.0.11",
        "@nestjs/core": "^11.0.11",
        "graphql": "^16.9.0"
    }
}
```

3. **Include build script**: Add a build script that compiles TypeScript:

```json
{
    "scripts": {
        "build": "tsc",
        "prepare": "npm run build"
    }
}
```

#### 5. Build process

During production builds:

1. The build script (`api/scripts/build.ts`) automatically packs and installs your workspace package as a tarball (see the sketch below)
2. This happens after `npm install --omit=dev` in the pack directory
3. The vendored package becomes a regular node_modules dependency in the final build
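In practice, that vendoring step boils down to a `pnpm pack` followed by an `npm install` of the resulting tarball. The sketch below condenses the logic from `api/scripts/build.ts` (shown in full later in this diff); the helper name and directory layout are illustrative:

```typescript
// Condensed sketch of the vendoring step performed by api/scripts/build.ts.
import { basename, join } from 'node:path';
import { $ } from 'zx';

async function vendorWorkspacePackage(pkgName: string, tempDir: string): Promise<void> {
    // Pack the workspace package into a tarball inside tempDir.
    const packed = await $`pnpm --filter ${pkgName} pack --pack-destination ${tempDir}`;
    const tarball = basename(packed.lines().at(-1)!);

    // Install the tarball so it becomes a regular node_modules dependency.
    await $`npm install ${join(tempDir, tarball)}`;
}
```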
#### 6. Development vs Production

- **Development**: Vite resolves workspace packages directly from their source
- **Production**: Packages are vendored as tarballs in `node_modules`

This approach ensures that workspace plugins work seamlessly in both development and production environments.
@@ -2,7 +2,7 @@

## Installation

Manual install of the staging and production plugins can be done with the following routes:
Manual install can be done with the following routes:

[production](https://stable.dl.unraid.net/unraid-api/dynamix.unraid.net.plg)
[staging](https://preview.dl.unraid.net/unraid-api/dynamix.unraid.net.staging.plg)
@@ -11,11 +11,11 @@ The repository consists of:
- Core Modules
- Tests

## API Server Architecture
## API Server Architecture

The API server is built with NestJS and provides the core functionality for interacting with Unraid systems.

### Key Components
### Key Components:

- `src/unraid-api/` - Core NestJS implementation
- `src/core/` - Legacy business logic and utilities
@@ -61,7 +61,7 @@ The store syncs data in two ways:
The repository is organized into several packages:

- `api/` - NestJS API server
- `plugin/` - Unraid plugin package
- `plugin/` - Unraid plugin package
- `web/` - Frontend application
- `unraid-ui/` - Shared UI components
@@ -1,219 +0,0 @@
# Unraid API Development Workflows

This document outlines the various workflow styles available for developing, building, and deploying the Unraid API monorepo.

## Repository Structure

The Unraid API monorepo consists of several packages:

- `api`: The Unraid API backend
- `web`: The web frontend components
- `plugin`: The Unraid plugin
- `unraid-ui`: UI components library

## Development Workflows

### Local Development

To start all development servers in the monorepo:

```bash
pnpm dev
```

This command runs all development servers concurrently:

- API server: <http://localhost:3001>
- Web components: <http://localhost:4321>
- UI components: <http://localhost:5173>

### Package-Specific Development

If you want to work on a specific package, you can run its development server individually:

#### API Development

```bash
cd api
pnpm dev
```

#### Web Development

```bash
cd web
pnpm dev
```

#### UI Component Development

```bash
cd unraid-ui
pnpm dev
```

## Building Workflows

### Building All Packages

To build all packages in the monorepo:

```bash
pnpm build
```

### Watch Mode Building

For continuous building during development:

```bash
pnpm build:watch
```

This is useful when you want to see your changes reflected without manually rebuilding. This will also allow you to install a local plugin to test your changes.

### Package-Specific Building

#### API Building

```bash
cd api
pnpm build
```

#### Web Building

```bash
cd web
pnpm build
```

#### Development Build for Web

```bash
cd web
pnpm build:dev
```

## Deployment Workflows

### Deploying to Development Unraid Server

To deploy to a development Unraid server:

```bash
pnpm unraid:deploy <SERVER_IP>
```

This command builds and deploys all components to the specified Unraid server.

### Package-Specific Deployment

#### API Deployment

```bash
cd api
pnpm unraid:deploy <SERVER_IP>
```

#### Web Deployment

```bash
cd web
pnpm unraid:deploy <SERVER_IP>
```

#### Plugin Deployment

```bash
cd plugin
pnpm unraid:deploy <SERVER_IP>
```

## Testing

To run tests across all packages:

```bash
pnpm test
```

### Package-Specific Testing

```bash
cd <package-directory>
pnpm test
```

## Code Quality Workflows

### Linting

To lint all packages:

```bash
pnpm lint
```

To automatically fix linting issues:

```bash
pnpm lint:fix
```

### Type Checking

To run type checking across all packages:

```bash
pnpm type-check
```

## GraphQL Codegen Workflows

For packages that use GraphQL, you can generate types from your schema:

```bash
cd <package-directory>
pnpm codegen
```

To watch for changes and regenerate types:

```bash
cd <package-directory>
pnpm codegen:watch
```

## Docker Workflows

The API package supports Docker-based development:

```bash
cd api
pnpm container:build   # Build the Docker container
pnpm container:start   # Start the container
pnpm container:stop    # Stop the container
pnpm container:enter   # Enter the container shell
pnpm container:test    # Run tests in the container
```

## CLI Commands

When working with a deployed Unraid API, you can use the CLI:

```bash
unraid-api --help
```

## Recommended Workflow for New Developers

1. Clone the repository: `git clone git@github.com:unraid/api.git`
2. Set up the monorepo: `just setup` or `pnpm install`
3. Start development servers: `pnpm dev`
4. Make your changes
5. Test your changes: `pnpm test`
6. Deploy to a development server: `pnpm unraid:deploy <SERVER_IP>`
7. Verify your changes on the Unraid server

If using nix, run `nix develop` from the root of the repo before Step 2.
@@ -11,7 +11,6 @@
    "max_restarts": 10,
    "min_uptime": 10000,
    "watch": false,
    "interpreter": "/usr/local/bin/node",
    "ignore_watch": ["node_modules", "src", ".env.*", "myservers.cfg"],
    "log_file": "/var/log/graphql-api.log",
    "kill_timeout": 10000
File diff suppressed because it is too large (Load Diff)

api/justfile (16 changed lines)
@@ -4,17 +4,17 @@ default:
    @just --list --justfile {{justfile()}} --list-heading $'\nAPI project recipes:\n'
    @just list-commands

setup:
    pnpm install
    pnpm run container:build

# builds js files that can run on an unraid server
@build:
    pnpm run build

# deploys to an unraid server
@deploy remote:
    ./scripts/deploy-dev.sh {{remote}}
@deploy:
    ./scripts/deploy-dev.sh

alias b := build
alias d := deploy

sync-env server:
    rsync -avz --progress --stats -e ssh .env* root@{{server}}:/usr/local/unraid-api
    ssh root@{{server}} 'cp /usr/local/unraid-api/.env.staging /usr/local/unraid-api/.env'
# build & deploy
bd: build deploy
@@ -1,3 +0,0 @@
# Legacy Assets

This folder will store legacy types / functionality that may be useful but is not currently a part of the API
File diff suppressed because it is too large (Load Diff)

api/package.json (335 changed lines)
@@ -1,6 +1,6 @@
{
    "name": "@unraid/api",
    "version": "4.10.0",
    "version": "4.1.2",
    "main": "src/cli/index.ts",
    "type": "module",
    "corepack": {
@@ -8,225 +8,188 @@
    },
    "repository": "git@github.com:unraid/api.git",
    "author": "Lime Technology, Inc. <unraid.net>",
    "license": "GPL-2.0-or-later",
    "license": "UNLICENSED",
    "engines": {
        "pnpm": "10.13.1"
        "pnpm": ">=8.0.0"
    },
    "scripts": {
        "// Development": "",
        "// Main application commands": "",
        "start": "node dist/main.js",
        "dev": "vite",
        "dev:debug": "NODE_OPTIONS='--inspect-brk=9229 --enable-source-maps' vite",
        "command": "pnpm run build && clear && ./dist/cli.js",
        "command:raw": "./dist/cli.js",
        "// Build and Deploy": "",
        "// Build commands": "",
        "build": "vite build --mode=production",
        "postbuild": "chmod +x dist/main.js && chmod +x dist/cli.js",
        "build:watch": "WATCH_MODE=true nodemon --watch src --ext ts,js,json --exec 'tsx ./scripts/build.ts'",
        "build:docker": "./scripts/dc.sh run --rm builder",
        "build:release": "tsx ./scripts/build.ts",
        "preunraid:deploy": "pnpm build",
        "unraid:deploy": "./scripts/deploy-dev.sh",
        "// GraphQL Codegen": "",
        "build-and-pack": "tsx ./scripts/build.ts",
        "// Code generation commands": "",
        "codegen": "MOTHERSHIP_GRAPHQL_LINK='https://staging.mothership.unraid.net/ws' graphql-codegen --config codegen.ts -r dotenv/config './.env.staging'",
        "codegen:watch": "DOTENV_CONFIG_PATH='./.env.staging' graphql-codegen --config codegen.ts --watch -r dotenv/config",
        "codegen:local": "NODE_TLS_REJECT_UNAUTHORIZED=0 MOTHERSHIP_GRAPHQL_LINK='https://mothership.localhost/ws' graphql-codegen --config codegen.ts --watch",
        "// Code Quality": "",
        "// Development and quality tools": "",
        "tsc": "tsc --noEmit",
        "lint": "eslint --config .eslintrc.ts src/",
        "lint:fix": "eslint --fix --config .eslintrc.ts src/",
        "type-check": "tsc --noEmit",
        "// Testing": "",
        "release": "standard-version",
        "// Testing commands": "",
        "test": "NODE_ENV=test vitest run",
        "test:watch": "NODE_ENV=test vitest --ui",
        "coverage": "NODE_ENV=test vitest run --coverage",
        "// Docker": "",
        "// Container management commands": "",
        "container:build": "./scripts/dc.sh build dev",
        "container:start": "pnpm run container:stop && ./scripts/dc.sh run --rm --service-ports dev",
        "container:stop": "./scripts/dc.sh stop dev",
        "container:test": "./scripts/dc.sh run --rm builder pnpm run test",
        "container:enter": "./scripts/dc.sh exec dev /bin/bash",
        "// Migration Scripts": "",
        "migration:codefirst": "tsx ./src/unraid-api/graph/migration-script.ts"
        "container:enter": "./scripts/dc.sh exec dev /bin/bash"
    },
    "bin": {
        "unraid-api": "dist/cli.js"
    },
    "dependencies": {
        "@apollo/client": "3.13.8",
        "@apollo/server": "4.12.2",
        "@as-integrations/fastify": "2.1.1",
        "@fastify/cookie": "11.0.2",
        "@fastify/helmet": "13.0.1",
        "@graphql-codegen/client-preset": "4.8.3",
        "@graphql-tools/load-files": "7.0.1",
        "@graphql-tools/merge": "9.0.24",
        "@graphql-tools/schema": "10.0.23",
        "@graphql-tools/utils": "10.8.6",
        "@jsonforms/core": "3.6.0",
        "@nestjs/apollo": "13.1.0",
        "@nestjs/cache-manager": "3.0.1",
        "@nestjs/common": "11.1.3",
        "@nestjs/config": "4.0.2",
        "@nestjs/core": "11.1.3",
        "@nestjs/event-emitter": "3.0.1",
        "@nestjs/graphql": "13.1.0",
        "@nestjs/passport": "11.0.5",
        "@nestjs/platform-fastify": "11.1.3",
        "@nestjs/schedule": "6.0.0",
        "@nestjs/throttler": "6.4.0",
        "@reduxjs/toolkit": "2.8.2",
        "@runonflux/nat-upnp": "1.0.2",
        "@types/diff": "8.0.0",
        "@unraid/libvirt": "2.1.0",
        "@unraid/shared": "workspace:*",
        "accesscontrol": "2.2.1",
        "bycontract": "2.0.11",
        "bytes": "3.1.2",
        "cache-manager": "7.0.1",
        "cacheable-lookup": "7.0.0",
        "camelcase-keys": "9.1.3",
        "casbin": "5.38.0",
        "change-case": "5.4.4",
        "chokidar": "4.0.3",
        "class-transformer": "0.5.1",
        "class-validator": "0.14.2",
        "cli-table": "0.3.11",
        "command-exists": "1.2.9",
        "convert": "5.12.0",
        "cookie": "1.0.2",
        "cron": "4.3.2",
        "cross-fetch": "4.1.0",
        "diff": "8.0.2",
        "dockerode": "4.0.7",
        "dotenv": "17.2.0",
        "execa": "9.6.0",
        "exit-hook": "4.0.0",
        "fastify": "5.4.0",
        "filenamify": "6.0.0",
        "fs-extra": "11.3.0",
        "glob": "11.0.3",
        "global-agent": "3.0.0",
        "got": "14.4.7",
        "graphql": "16.11.0",
        "graphql-fields": "2.0.3",
        "graphql-scalars": "1.24.2",
        "graphql-subscriptions": "3.0.0",
        "graphql-tag": "2.12.6",
        "graphql-ws": "6.0.6",
        "ini": "5.0.0",
        "ip": "2.0.1",
        "jose": "6.0.11",
        "json-bigint-patch": "0.0.8",
        "lodash-es": "4.17.21",
        "multi-ini": "2.3.2",
        "mustache": "4.2.0",
        "nest-authz": "2.17.0",
        "nest-commander": "3.17.0",
        "nestjs-pino": "4.4.0",
        "node-cache": "5.1.2",
        "node-window-polyfill": "1.0.4",
        "p-retry": "6.2.1",
        "passport-custom": "1.1.1",
        "passport-http-header-strategy": "1.1.0",
        "path-type": "6.0.0",
        "pino": "9.7.0",
        "pino-http": "10.5.0",
        "pino-pretty": "13.0.0",
        "pm2": "6.0.8",
        "@apollo/client": "^3.11.8",
        "@apollo/server": "^4.11.2",
        "@as-integrations/fastify": "^2.1.1",
        "@fastify/cookie": "^9.4.0",
        "@graphql-codegen/client-preset": "^4.5.0",
        "@graphql-tools/load-files": "^7.0.0",
        "@graphql-tools/merge": "^9.0.8",
        "@graphql-tools/schema": "^10.0.7",
        "@graphql-tools/utils": "^10.5.5",
        "@nestjs/apollo": "^12.2.1",
        "@nestjs/common": "^10.4.7",
        "@nestjs/core": "^10.4.7",
        "@nestjs/graphql": "^12.2.1",
        "@nestjs/passport": "^10.0.3",
        "@nestjs/platform-fastify": "^10.4.7",
        "@nestjs/schedule": "^4.1.1",
        "@nestjs/throttler": "^6.2.1",
        "@reduxjs/toolkit": "^2.3.0",
        "@runonflux/nat-upnp": "^1.0.2",
        "@types/diff": "^7.0.1",
        "@unraid/libvirt": "^1.1.3",
        "accesscontrol": "^2.2.1",
        "bycontract": "^2.0.11",
        "bytes": "^3.1.2",
        "cacheable-lookup": "^7.0.0",
        "camelcase-keys": "^9.1.3",
        "casbin": "^5.32.0",
        "catch-exit": "^1.2.2",
        "chokidar": "^4.0.1",
        "cli-table": "^0.3.11",
        "command-exists": "^1.2.9",
        "convert": "^5.5.1",
        "cookie": "^1.0.2",
        "cron": "3.5.0",
        "cross-fetch": "^4.0.0",
        "diff": "^7.0.0",
        "docker-event-emitter": "^0.3.0",
        "dockerode": "^3.3.5",
        "dotenv": "^16.4.5",
        "execa": "^9.5.1",
        "exit-hook": "^4.0.0",
        "filenamify": "^6.0.0",
        "fs-extra": "^11.2.0",
        "glob": "^11.0.1",
        "global-agent": "^3.0.0",
        "got": "^14.4.4",
        "graphql": "^16.9.0",
        "graphql-fields": "^2.0.3",
        "graphql-scalars": "^1.23.0",
        "graphql-subscriptions": "^2.0.0",
        "graphql-tag": "^2.12.6",
        "graphql-type-json": "^0.3.2",
        "graphql-type-uuid": "^0.2.0",
        "graphql-ws": "^5.16.0",
        "ini": "^4.1.2",
        "ip": "^2.0.1",
        "jose": "^5.9.6",
        "lodash-es": "^4.17.21",
        "multi-ini": "^2.3.2",
        "mustache": "^4.2.0",
        "nest-authz": "^2.11.0",
        "nest-commander": "^3.15.0",
        "nestjs-pino": "^4.1.0",
        "node-cache": "^5.1.2",
        "node-window-polyfill": "^1.0.2",
        "p-retry": "^6.2.0",
        "passport-custom": "^1.1.1",
        "passport-http-header-strategy": "^1.1.0",
        "path-type": "^6.0.0",
        "pino": "^9.5.0",
        "pino-http": "^10.3.0",
        "pino-pretty": "^11.3.0",
        "pm2": "^5.4.2",
        "reflect-metadata": "^0.1.14",
        "request": "2.88.2",
        "rxjs": "7.8.2",
        "semver": "7.7.2",
        "strftime": "0.10.3",
        "systeminformation": "5.27.7",
        "uuid": "11.1.0",
        "ws": "8.18.3",
        "zen-observable-ts": "1.1.0",
        "zod": "3.25.76"
    },
    "peerDependencies": {
        "unraid-api-plugin-connect": "workspace:*"
    },
    "peerDependenciesMeta": {
        "unraid-api-plugin-connect": {
            "optional": true
        }
        "request": "^2.88.2",
        "semver": "^7.6.3",
        "strftime": "^0.10.3",
        "systeminformation": "^5.25.11",
        "uuid": "^11.0.2",
        "ws": "^8.18.0",
        "zod": "^3.23.8"
    },
    "devDependencies": {
        "@eslint/js": "9.31.0",
        "@graphql-codegen/add": "5.0.3",
        "@graphql-codegen/cli": "5.0.7",
        "@graphql-codegen/fragment-matcher": "5.1.0",
        "@graphql-codegen/import-types-preset": "3.0.1",
        "@graphql-codegen/typed-document-node": "5.1.2",
        "@graphql-codegen/typescript": "4.1.6",
        "@graphql-codegen/typescript-operations": "4.6.1",
        "@graphql-codegen/typescript-resolvers": "4.5.1",
        "@graphql-typed-document-node/core": "3.2.0",
        "@ianvs/prettier-plugin-sort-imports": "4.5.1",
        "@nestjs/testing": "11.1.3",
        "@originjs/vite-plugin-commonjs": "1.0.3",
        "@rollup/plugin-node-resolve": "16.0.1",
        "@swc/core": "1.12.14",
        "@types/async-exit-hook": "2.0.2",
        "@types/bytes": "3.1.5",
        "@types/cli-table": "0.3.4",
        "@types/command-exists": "1.2.3",
        "@types/cors": "2.8.19",
        "@types/dockerode": "3.3.42",
        "@types/graphql-fields": "1.3.9",
        "@types/graphql-type-uuid": "0.2.6",
        "@types/ini": "4.1.1",
        "@types/ip": "1.1.3",
        "@types/lodash": "4.17.20",
        "@types/lodash-es": "4.17.12",
        "@types/mustache": "4.2.6",
        "@types/node": "22.16.4",
        "@types/pify": "6.1.0",
        "@types/semver": "7.7.0",
        "@types/sendmail": "1.4.7",
        "@types/stoppable": "1.1.3",
        "@types/strftime": "0.9.8",
        "@types/uuid": "10.0.0",
        "@types/ws": "8.18.1",
        "@types/wtfnode": "0.7.3",
        "@vitest/coverage-v8": "3.2.4",
        "@vitest/ui": "3.2.4",
        "@graphql-codegen/add": "^5.0.3",
        "@graphql-codegen/cli": "^5.0.3",
        "@graphql-codegen/fragment-matcher": "^5.0.2",
        "@graphql-codegen/import-types-preset": "^3.0.0",
        "@graphql-codegen/typed-document-node": "^5.0.11",
        "@graphql-codegen/typescript": "^4.1.1",
        "@graphql-codegen/typescript-operations": "^4.3.1",
        "@graphql-codegen/typescript-resolvers": "4.4.3",
        "@graphql-typed-document-node/core": "^3.2.0",
        "@ianvs/prettier-plugin-sort-imports": "^4.4.0",
        "@nestjs/testing": "^10.4.7",
        "@originjs/vite-plugin-commonjs": "^1.0.3",
        "@rollup/plugin-node-resolve": "^15.3.0",
        "@swc/core": "^1.10.1",
        "@types/async-exit-hook": "^2.0.2",
        "@types/bytes": "^3.1.4",
        "@types/cli-table": "^0.3.4",
        "@types/command-exists": "^1.2.3",
        "@types/cors": "^2.8.17",
        "@types/dockerode": "^3.3.31",
        "@types/graphql-fields": "^1.3.9",
        "@types/graphql-type-uuid": "^0.2.6",
        "@types/ini": "^4.1.1",
        "@types/ip": "^1.1.3",
        "@types/lodash": "^4.17.13",
        "@types/mustache": "^4.2.5",
        "@types/node": "^22.9.0",
        "@types/pify": "^5.0.4",
        "@types/semver": "^7.5.8",
        "@types/sendmail": "^1.4.7",
        "@types/stoppable": "^1.1.3",
        "@types/strftime": "^0.9.8",
        "@types/uuid": "^10.0.0",
        "@types/ws": "^8.5.13",
        "@types/wtfnode": "^0.7.3",
        "@vitest/coverage-v8": "^3.0.5",
        "@vitest/ui": "^3.0.5",
        "cz-conventional-changelog": "3.3.0",
        "eslint": "9.31.0",
        "eslint-plugin-import": "2.32.0",
        "eslint-plugin-n": "17.21.0",
        "eslint-plugin-no-relative-import-paths": "1.6.1",
        "eslint-plugin-prettier": "5.5.1",
        "graphql-codegen-typescript-validation-schema": "0.17.1",
        "jiti": "2.4.2",
        "nodemon": "3.1.10",
        "prettier": "3.6.2",
        "rollup-plugin-node-externals": "8.0.1",
        "commit-and-tag-version": "9.6.0",
        "tsx": "4.20.3",
        "type-fest": "4.41.0",
        "typescript": "5.8.3",
        "typescript-eslint": "8.37.0",
        "unplugin-swc": "1.5.5",
        "vite": "7.0.4",
        "vite-plugin-node": "7.0.0",
        "vite-tsconfig-paths": "5.1.4",
        "vitest": "3.2.4",
        "zx": "8.7.1"
        "eslint": "^9.14.0",
        "eslint-plugin-no-relative-import-paths": "^1.6.1",
        "eslint-plugin-prettier": "^5.2.3",
        "graphql-codegen-typescript-validation-schema": "^0.17.0",
        "jiti": "^2.4.0",
        "nodemon": "^3.1.7",
        "rollup-plugin-node-externals": "^7.1.3",
        "standard-version": "^9.5.0",
        "tsx": "^4.19.2",
        "typescript": "^5.6.3",
        "typescript-eslint": "^8.13.0",
        "unplugin-swc": "^1.5.1",
        "vite": "^5.4.14",
        "vite-plugin-node": "^4.0.0",
        "vite-tsconfig-paths": "^5.1.0",
        "vitest": "^3.0.5",
        "zx": "^8.3.2"
    },
    "overrides": {
        "eslint": {
            "jiti": "2.4.2"
        },
        "@as-integrations/fastify": {
            "fastify": "$fastify"
        },
        "nest-authz": {
            "@nestjs/common": "$@nestjs/common",
            "@nestjs/core": "$@nestjs/core"
            "jiti": "2"
        }
    },
    "private": true,
    "packageManager": "pnpm@10.13.1"
    "packageManager": "pnpm@10.4.1"
}
@@ -1,61 +1,15 @@
#!/usr/bin/env zx
import { mkdir, readFile, writeFile } from 'fs/promises';
import { existsSync } from 'node:fs';
import { basename, join, resolve } from 'node:path';
import { mkdir, readFile, rm, writeFile } from 'fs/promises';
import { exit } from 'process';

import type { PackageJson } from 'type-fest';
import { $, cd } from 'zx';

import { getDeploymentVersion } from './get-deployment-version.js';

type ApiPackageJson = PackageJson & {
    version: string;
    peerDependencies: Record<string, string>;
    dependencies?: Record<string, string>;
};

/**
 * Map of workspace packages to vendor into production builds.
 * Key: package name, Value: path from monorepo root to the package directory
 */
const WORKSPACE_PACKAGES_TO_VENDOR = {
    '@unraid/shared': 'packages/unraid-shared',
    'unraid-api-plugin-connect': 'packages/unraid-api-plugin-connect',
} as const;

/**
 * Packs a workspace package and installs it as a tarball dependency.
 */
const packAndInstallWorkspacePackage = async (pkgName: string, pkgPath: string, tempDir: string) => {
    const [fullPkgPath, fullTempDir] = [resolve(pkgPath), resolve(tempDir)];
    if (!existsSync(fullPkgPath)) {
        console.warn(`Workspace package ${pkgName} not found at ${fullPkgPath}. Skipping.`);
        return;
    }
    console.log(`Building and packing workspace package ${pkgName}...`);
    // Pack the package to a tarball
    const packedResult = await $`pnpm --filter ${pkgName} pack --pack-destination ${fullTempDir}`;
    const tarballPath = packedResult.lines().at(-1)!;
    const tarballName = basename(tarballPath);

    // Install the tarball
    const tarballPattern = join(fullTempDir, tarballName);
    await $`npm install ${tarballPattern}`;
};

/**------------------------------------------------------------------------
 * Build Script
 *
 * Builds & vendors the API for deployment to an Unraid server.
 *
 * Places artifacts in the `deploy/` folder:
 * - release/ contains source code & assets
 * - node-modules-archive/ contains tarball of node_modules
 *------------------------------------------------------------------------**/

try {
    // Create release and pack directories
    // Clean existing deploy folder
    await rm('./deploy', { recursive: true }).catch(() => {});
    await mkdir('./deploy/release', { recursive: true });
    await mkdir('./deploy/pack', { recursive: true });

@@ -67,68 +21,37 @@ try {

    // Get package details
    const packageJson = await readFile('./package.json', 'utf-8');
    const parsedPackageJson = JSON.parse(packageJson) as ApiPackageJson;
    const parsedPackageJson = JSON.parse(packageJson);

    const deploymentVersion = await getDeploymentVersion(process.env, parsedPackageJson.version);

    // Update the package.json version to the deployment version
    parsedPackageJson.version = deploymentVersion;

    /**---------------------------------------------
     * Handle workspace runtime dependencies
     *--------------------------------------------*/
    const workspaceDeps = Object.keys(WORKSPACE_PACKAGES_TO_VENDOR);
    if (workspaceDeps.length > 0) {
        console.log(`Stripping workspace deps from package.json: ${workspaceDeps.join(', ')}`);
        workspaceDeps.forEach((dep) => {
            if (parsedPackageJson.dependencies?.[dep]) {
                delete parsedPackageJson.dependencies[dep];
            }
        });
    }

    // omit dev dependencies from vendored dependencies in release build
    parsedPackageJson.devDependencies = {};

    // Create a temporary directory for packaging
    await mkdir('./deploy/pack/', { recursive: true });

    await writeFile('./deploy/pack/package.json', JSON.stringify(parsedPackageJson, null, 4));
    // Copy necessary files to the pack directory
    await $`cp -r dist README.md .env.* ecosystem.config.json ./deploy/pack/`;

    // Change to the pack directory and install dependencies
    cd('./deploy/pack');

    console.log('Building production node_modules...');

    console.log('Installing production dependencies...');
    $.verbose = true;
    await $`npm install --omit=dev`;

    await writeFile('package.json', JSON.stringify(parsedPackageJson, null, 4));

    /** After npm install, vendor workspace packages via pack/install */
    if (workspaceDeps.length > 0) {
        console.log('Vendoring workspace packages...');
        const tempDir = './packages';
        await mkdir(tempDir, { recursive: true });

        for (const dep of workspaceDeps) {
            const pkgPath =
                WORKSPACE_PACKAGES_TO_VENDOR[dep as keyof typeof WORKSPACE_PACKAGES_TO_VENDOR];
            // The extra '../../../' prefix adjusts for the fact that we're in the pack directory.
            // this way, pkgPath can be defined relative to the monorepo root.
            await packAndInstallWorkspacePackage(dep, join('../../../', pkgPath), tempDir);
        }
    }

    // Clean the release directory
    await $`rm -rf ../release/*`;

    // Copy other files to release directory
    await $`cp -r ./* ../release/`;
    await $`pnpm install --prod --ignore-workspace --node-linker hoisted`;

    // chmod the cli
    await $`chmod +x ./dist/cli.js`;
    await $`chmod +x ./dist/main.js`;

    // Create the tarball
    await $`tar -czf ../release/unraid-api.tgz ./`;

    // Clean up
    cd('..');

} catch (error) {
    // Error with a command
    if (Object.keys(error).includes('stderr')) {
@@ -1,17 +1,26 @@
#!/bin/bash

# Arguments
# $1: SSH server name (required)
# Path to store the last used server name
state_file="$HOME/.deploy_state"

# Read the last used server name from the state file
if [[ -f "$state_file" ]]; then
    last_server_name=$(cat "$state_file")
else
    last_server_name=""
fi

# Read the server name from the command-line argument or use the last used server name as the default
server_name="${1:-$last_server_name}"

# Check if the server name is provided
if [[ -z "$1" ]]; then
    echo "Error: SSH server name is required."
    echo "Usage: $0 <server_name>"
if [[ -z "$server_name" ]]; then
    echo "Please provide the SSH server name."
    exit 1
fi

# Set server name from command-line argument
server_name="$1"
# Save the current server name to the state file
echo "$server_name" > "$state_file"

# Source directory path
source_directory="./dist"
@@ -25,11 +34,9 @@ if [ ! -d "$source_directory" ]; then
    fi
fi

# Destination directory path
destination_directory="/usr/local/unraid-api"

# Change ownership on copy
# Replace the value inside the rsync command with the user's input
rsync_command="rsync -avz --delete --progress --stats -e ssh \"$source_directory\" \"root@${server_name}:$destination_directory\""
rsync_command="rsync -avz -e ssh $source_directory root@${server_name}:/usr/local/unraid-api"

echo "Executing the following command:"
echo "$rsync_command"
@@ -39,10 +46,10 @@ eval "$rsync_command"
exit_code=$?

# Chown the directory
ssh root@"${server_name}" 'chown -R root:root /usr/local/unraid-api'
ssh root@"${server_name}" "chown -R root:root /usr/local/unraid-api"

# Run unraid-api restart on remote host
ssh root@"${server_name}" 'INTROSPECTION=true LOG_LEVEL=trace unraid-api restart'
ssh root@"${server_name}" "INTROSPECTION=true LOG_LEVEL=trace unraid-api restart"

# Play built-in sound based on the operating system
if [[ "$OSTYPE" == "darwin"* ]]; then
@@ -19,12 +19,10 @@ export const getDeploymentVersion = async (env = process.env, packageVersion: st
        return env.IS_TAGGED ? packageVersion : `${packageVersion}+${env.GIT_SHA}`;
    } else {
        const gitShortSHA = await runCommand('git', ['rev-parse', '--short', 'HEAD']);
        const isCommitTagged =
            (await runCommand('git', ['describe', '--tags', '--abbrev=0', '--exact-match'])) !==
            undefined;

        const isCommitTagged = await runCommand('git', ['describe', '--tags', '--abbrev=0', '--exact-match']) !== undefined;

        console.log('gitShortSHA', gitShortSHA, 'isCommitTagged', isCommitTagged);

        if (!gitShortSHA) {
            console.error('Failed to get git short SHA');
            process.exit(1);
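The versioning rule visible in this hunk is simple: tagged commits ship the plain package version, while untagged builds append the short git SHA as semver build metadata. A minimal sketch of that rule, with assumed example inputs:

```typescript
// Sketch of the deployment-version rule shown in get-deployment-version.ts above.
const deploymentVersion = (pkgVersion: string, sha: string, tagged: boolean): string =>
    tagged ? pkgVersion : `${pkgVersion}+${sha}`;

deploymentVersion('4.10.0', 'c0fdc0d', true);  // "4.10.0"
deploymentVersion('4.10.0', 'c0fdc0d', false); // "4.10.0+c0fdc0d"
```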
@@ -1,7 +1,7 @@
import { getAllowedOrigins } from '@app/common/allowed-origins.js';
import { store } from '@app/store/index.js';
import { loadConfigFile } from '@app/store/modules/config.js';
import { loadStateFiles } from '@app/store/modules/emhttp.js';
import { getAllowedOrigins } from '@app/common/allowed-origins';
import { store } from '@app/store/index';
import { loadConfigFile } from '@app/store/modules/config';
import { loadStateFiles } from '@app/store/modules/emhttp';

import 'reflect-metadata';

@@ -9,37 +9,36 @@ import { expect, test } from 'vitest';

test('Returns allowed origins', async () => {
    // Load state files into store
    await store.dispatch(loadStateFiles()).unwrap();
    await store.dispatch(loadConfigFile()).unwrap();
    await store.dispatch(loadStateFiles());
    await store.dispatch(loadConfigFile());

    // Get allowed origins
    const allowedOrigins = getAllowedOrigins();

    // Test that the result is an array
    expect(Array.isArray(allowedOrigins)).toBe(true);

    // Test that it contains the expected socket paths
    expect(allowedOrigins).toContain('/var/run/unraid-notifications.sock');
    expect(allowedOrigins).toContain('/var/run/unraid-php.sock');
    expect(allowedOrigins).toContain('/var/run/unraid-cli.sock');

    // Test that it contains the expected local URLs
    expect(allowedOrigins).toContain('http://localhost:8080');
    expect(allowedOrigins).toContain('https://localhost:4443');

    // Test that it contains the expected connect URLs
    expect(allowedOrigins).toContain('https://connect.myunraid.net');
    expect(allowedOrigins).toContain('https://connect-staging.myunraid.net');
    expect(allowedOrigins).toContain('https://dev-my.myunraid.net:4000');

    // Test that it contains the extra origins from config
    expect(allowedOrigins).toContain('https://google.com');
    expect(allowedOrigins).toContain('https://test.com');

    // Test that it contains some of the remote URLs
    expect(allowedOrigins).toContain('https://tower.local:4443');
    expect(allowedOrigins).toContain('https://192.168.1.150:4443');

    // Test that there are no duplicates
    expect(allowedOrigins.length).toBe(new Set(allowedOrigins).size);
    expect(getAllowedOrigins()).toMatchInlineSnapshot(`
        [
          "/var/run/unraid-notifications.sock",
          "/var/run/unraid-php.sock",
          "/var/run/unraid-cli.sock",
          "http://localhost:8080",
          "https://localhost:4443",
          "https://tower.local:4443",
          "https://192.168.1.150:4443",
          "https://tower:4443",
          "https://192-168-1-150.thisisfourtyrandomcharacters012345678900.myunraid.net:4443",
          "https://85-121-123-122.thisisfourtyrandomcharacters012345678900.myunraid.net:8443",
          "https://10-252-0-1.hash.myunraid.net:4443",
          "https://10-252-1-1.hash.myunraid.net:4443",
          "https://10-253-3-1.hash.myunraid.net:4443",
          "https://10-253-4-1.hash.myunraid.net:4443",
          "https://10-253-5-1.hash.myunraid.net:4443",
          "https://10-100-0-1.hash.myunraid.net:4443",
          "https://10-100-0-2.hash.myunraid.net:4443",
          "https://10-123-1-2.hash.myunraid.net:4443",
          "https://221-123-121-112.hash.myunraid.net:4443",
          "https://google.com",
          "https://test.com",
          "https://connect.myunraid.net",
          "https://connect-staging.myunraid.net",
          "https://dev-my.myunraid.net:4000",
        ]
    `);
});
@@ -1,3 +0,0 @@
import { test } from 'vitest';

test.todo('Returns generated data');
@@ -1,137 +0,0 @@
import { ConfigService } from '@nestjs/config';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { ApiConfigPersistence } from '@app/unraid-api/config/api-config.module.js';
import { ConfigPersistenceHelper } from '@app/unraid-api/config/persistence.helper.js';

describe('ApiConfigPersistence', () => {
    let service: ApiConfigPersistence;
    let configService: ConfigService;
    let persistenceHelper: ConfigPersistenceHelper;

    beforeEach(() => {
        configService = {
            get: vi.fn(),
            set: vi.fn(),
        } as any;

        persistenceHelper = {} as ConfigPersistenceHelper;
        service = new ApiConfigPersistence(configService, persistenceHelper);
    });

    describe('convertLegacyConfig', () => {
        it('should migrate sandbox from string "yes" to boolean true', () => {
            const legacyConfig = {
                local: { sandbox: 'yes' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.sandbox).toBe(true);
        });

        it('should migrate sandbox from string "no" to boolean false', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.sandbox).toBe(false);
        });

        it('should migrate extraOrigins from comma-separated string to array', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: 'https://example.com,https://test.com' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.extraOrigins).toEqual(['https://example.com', 'https://test.com']);
        });

        it('should filter out non-HTTP origins from extraOrigins', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: {
                    extraOrigins: 'https://example.com,invalid-origin,http://test.com,ftp://bad.com',
                },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.extraOrigins).toEqual(['https://example.com', 'http://test.com']);
        });

        it('should handle empty extraOrigins string', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.extraOrigins).toEqual([]);
        });

        it('should migrate ssoSubIds from comma-separated string to array', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: 'user1,user2,user3' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.ssoSubIds).toEqual(['user1', 'user2', 'user3']);
        });

        it('should handle empty ssoSubIds string', () => {
            const legacyConfig = {
                local: { sandbox: 'no' },
                api: { extraOrigins: '' },
                remote: { ssoSubIds: '' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.ssoSubIds).toEqual([]);
        });

        it('should handle undefined config sections', () => {
            const legacyConfig = {};

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.sandbox).toBe(false);
            expect(result.extraOrigins).toEqual([]);
            expect(result.ssoSubIds).toEqual([]);
        });

        it('should handle complete migration with all fields', () => {
            const legacyConfig = {
                local: { sandbox: 'yes' },
                api: { extraOrigins: 'https://app1.example.com,https://app2.example.com' },
                remote: { ssoSubIds: 'sub1,sub2,sub3' },
            };

            const result = service.convertLegacyConfig(legacyConfig);

            expect(result.sandbox).toBe(true);
            expect(result.extraOrigins).toEqual([
                'https://app1.example.com',
                'https://app2.example.com',
            ]);
            expect(result.ssoSubIds).toEqual(['sub1', 'sub2', 'sub3']);
        });
    });
});
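Taken together, these deleted tests fully specify the legacy-config migration. A minimal sketch of a `convertLegacyConfig` that would satisfy them (an illustration inferred from the expectations above, not the repo's actual implementation):

```typescript
// Sketch only: behavior inferred from the test expectations above.
const splitCsv = (value = ''): string[] =>
    value.split(',').map((part) => part.trim()).filter(Boolean);

function convertLegacyConfig(legacy: {
    local?: { sandbox?: string };
    api?: { extraOrigins?: string };
    remote?: { ssoSubIds?: string };
}) {
    return {
        sandbox: legacy.local?.sandbox === 'yes',
        // Keep only http(s) origins, dropping entries like "invalid-origin" or ftp URLs.
        extraOrigins: splitCsv(legacy.api?.extraOrigins).filter((origin) =>
            /^https?:\/\//.test(origin)
        ),
        ssoSubIds: splitCsv(legacy.remote?.ssoSubIds),
    };
}
```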
api/src/__test__/core/modules/array/get-array-data.test.ts (new file, 209 lines)
@@ -0,0 +1,209 @@
|
||||
import { expect, test, vi } from 'vitest';
|
||||
|
||||
import { getArrayData } from '@app/core/modules/array/get-array-data';
|
||||
import { store } from '@app/store';
|
||||
import { loadConfigFile } from '@app/store/modules/config';
|
||||
import { loadStateFiles } from '@app/store/modules/emhttp';
|
||||
|
||||
vi.mock('@app/core/pubsub', () => ({
|
||||
pubsub: { publish: vi.fn() },
|
||||
}));
|
||||
|
||||
test('Creates an array event', async () => {
|
||||
// Load state files into store
|
||||
await store.dispatch(loadStateFiles());
|
||||
|
||||
await store.dispatch(loadConfigFile());
|
||||
|
||||
const arrayEvent = getArrayData(store.getState);
|
||||
expect(arrayEvent).toMatchObject({
|
||||
boot: {
|
||||
comment: 'Unraid OS boot device',
|
||||
critical: null,
|
||||
device: 'sda',
|
||||
exportable: true,
|
||||
format: 'unknown',
|
||||
fsFree: 3191407,
|
||||
fsSize: 4042732,
|
||||
fsType: 'vfat',
|
||||
fsUsed: 851325,
|
||||
id: 'Cruzer',
|
||||
idx: 32,
|
            name: 'flash',
            numErrors: 0,
            numReads: 0,
            numWrites: 0,
            rotational: true,
            size: 3956700,
            status: 'DISK_OK',
            temp: null,
            transport: 'usb',
            type: 'Flash',
            warning: null,
        },
        caches: [
            {
                comment: '',
                critical: null,
                device: 'sdi',
                exportable: false,
                format: 'MBR: 4KiB-aligned',
                fsFree: 111810683,
                fsSize: 250059317,
                fsType: 'btrfs',
                fsUsed: 137273827,
                id: 'Samsung_SSD_850_EVO_250GB_S2R5NX0H643734Z',
                idx: 30,
                name: 'cache',
                numErrors: 0,
                numReads: 0,
                numWrites: 0,
                rotational: false,
                size: 244198552,
                status: 'DISK_OK',
                temp: 22,
                transport: 'ata',
                type: 'Cache',
                warning: null,
            },
            {
                comment: null,
                critical: null,
                device: 'nvme0n1',
                exportable: false,
                format: 'MBR: 4KiB-aligned',
                fsFree: null,
                fsSize: null,
                fsType: null,
                fsUsed: null,
                id: 'KINGSTON_SA2000M8250G_50026B7282669D9E',
                idx: 31,
                name: 'cache2',
                numErrors: 0,
                numReads: 0,
                numWrites: 0,
                rotational: false,
                size: 244198552,
                status: 'DISK_OK',
                temp: 27,
                transport: 'nvme',
                type: 'Cache',
                warning: null,
            },
        ],
        capacity: {
            disks: {
                free: '27',
                total: '30',
                used: '3',
            },
            kilobytes: {
                free: '19495825571',
                total: '41994745901',
                used: '22498920330',
            },
        },
        disks: [
            {
                comment: 'Seagate Exos',
                critical: 75,
                device: 'sdf',
                exportable: false,
                format: 'GPT: 4KiB-aligned',
                fsFree: 13882739732,
                fsSize: 17998742753,
                fsType: 'xfs',
                fsUsed: 4116003021,
                id: 'ST18000NM000J-2TV103_ZR5B1W9X',
                idx: 1,
                name: 'disk1',
                numErrors: 0,
                numReads: 0,
                numWrites: 0,
                rotational: true,
                size: 17578328012,
                status: 'DISK_OK',
                temp: 30,
                transport: 'ata',
                type: 'Data',
                warning: 50,
            },
            {
                comment: '',
                critical: null,
                device: 'sdj',
                exportable: false,
                format: 'GPT: 4KiB-aligned',
                fsFree: 93140746,
                fsSize: 11998001574,
                fsType: 'xfs',
                fsUsed: 11904860828,
                id: 'WDC_WD120EDAZ-11F3RA0_5PJRD45C',
                idx: 2,
                name: 'disk2',
                numErrors: 0,
                numReads: 0,
                numWrites: 0,
                rotational: true,
                size: 11718885324,
                status: 'DISK_OK',
                temp: 30,
                transport: 'ata',
                type: 'Data',
                warning: null,
            },
            {
                comment: '',
                critical: null,
                device: 'sde',
                exportable: false,
                format: 'GPT: 4KiB-aligned',
                fsFree: 5519945093,
                fsSize: 11998001574,
                fsType: 'xfs',
                fsUsed: 6478056481,
                id: 'WDC_WD120EMAZ-11BLFA0_5PH8BTYD',
                idx: 3,
                name: 'disk3',
                numErrors: 0,
                numReads: 0,
                numWrites: 0,
                rotational: true,
                size: 11718885324,
                status: 'DISK_OK',
                temp: 30,
                transport: 'ata',
                type: 'Data',
                warning: null,
            },
        ],
        id: expect.any(String),
        parities: [
            {
                comment: null,
                critical: null,
                device: 'sdh',
                exportable: false,
                format: 'GPT: 4KiB-aligned',
                fsFree: null,
                fsSize: null,
                fsType: null,
                fsUsed: null,
                id: 'ST18000NM000J-2TV103_ZR585CPY',
                idx: 0,
                name: 'parity',
                numErrors: 0,
                numReads: 0,
                numWrites: 0,
                rotational: true,
                size: 17578328012,
                status: 'DISK_OK',
                temp: 25,
                transport: 'ata',
                type: 'Parity',
                warning: null,
            },
        ],
        state: 'STOPPED',
    });
});
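The fixture tail above completes the expected array snapshot: one parity slot, three data disks, two cache devices, the flash boot device, and `state: 'STOPPED'`. Every field is pinned literally except `id`, which uses vitest's asymmetric matcher so a runtime-generated identifier cannot fail the deep-equality check. A minimal sketch of that pattern, with hypothetical data:

```typescript
import { expect, test } from 'vitest';

test('pins deterministic fields exactly, generated fields loosely', () => {
    // Hypothetical result whose id is generated at runtime.
    const result = { id: crypto.randomUUID(), state: 'STOPPED' };

    // toEqual performs a deep-equality check; expect.any(String) accepts
    // any string, so the random id never breaks the assertion.
    expect(result).toEqual({
        id: expect.any(String),
        state: 'STOPPED',
    });
});
```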
@@ -1,3 +0,0 @@
import { test } from 'vitest';

test.todo('Returns the current content');
@@ -1,5 +0,0 @@
import { test } from 'vitest';

test.todo('Returns a single disk by ID');

test.todo('Returns nothing if no disk matches the ID');
@@ -1,5 +0,0 @@
import { test } from 'vitest';

test.todo('Returns all the Docker containers');

test.todo('Returns running Docker containers');
@@ -1,7 +0,0 @@
import { test } from 'vitest';

test.todo('Returns all USB devices');

test.todo('Returns all PCI-e devices');

test.todo('Returns all audio devices');
@@ -1,3 +0,0 @@
import { test } from 'vitest';

test.todo('Gets total count of Docker containers installed/running');
@@ -1,3 +0,0 @@
import { test } from 'vitest';

test.todo('Returns baseboard info');
@@ -1,3 +0,0 @@
import { test } from 'vitest';

test.todo('Gets CPU info');
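The hunks above delete seven placeholder suites. In vitest, `test.todo` registers a named but unimplemented test that shows up in the run summary without executing, so removing the files silently drops those reminders. Promoting a placeholder instead only requires adding the callback; a sketch with a hypothetical `getCpuInfo`:

```typescript
import { expect, test } from 'vitest';

// Hypothetical promotion of a test.todo placeholder into a real test.
test('Gets CPU info', async () => {
    const getCpuInfo = async () => ({ cores: 8, brand: 'AMD Ryzen' });

    await expect(getCpuInfo()).resolves.toMatchObject({ cores: 8 });
});
```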
@@ -1,8 +1,8 @@
 import { expect, test, vi } from 'vitest';

-import { ConsoleNotifier } from '@app/core/notifiers/console.js';
+import { ConsoleNotifier } from '@app/core/notifiers/console';

-vi.mock('@app/core/log.js', () => ({
+vi.mock('@app/core/log', () => ({
     logger: {
         info: vi.fn(),
         error: vi.fn(),
@@ -1,8 +1,8 @@
 import { expect, test, vi } from 'vitest';

-import { UnraidLocalNotifier } from '@app/core/notifiers/unraid-local.js';
+import { UnraidLocalNotifier } from '@app/core/notifiers/unraid-local';

-vi.mock('@app/core/log.js', () => ({
+vi.mock('@app/core/log', () => ({
     logger: {
         info: vi.fn(),
         error: vi.fn(),
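Both notifier hunks make the same paired change: the import specifier loses its `.js` suffix and the `vi.mock` path changes with it. The pairing matters because vitest keys mocks by module specifier, so mocking `'@app/core/log.js'` while the code under test imports `'@app/core/log'` would leave the real logger in place. The pattern the truncated hunks imply, with the logger shape assumed:

```typescript
import { vi } from 'vitest';

// The mocked path must match the specifier used by the module under test;
// '@app/core/log.js' and '@app/core/log' are treated as different modules.
vi.mock('@app/core/log', () => ({
    logger: {
        info: vi.fn(),
        error: vi.fn(),
    },
}));
```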
api/src/__test__/core/utils/array/array-is-running.test.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import { expect, test, vi } from 'vitest';

import type { SliceState } from '@app/store/modules/emhttp';
import { getters } from '@app/store';

test('Returns true if the array is started', async () => {
    vi.spyOn(getters, 'emhttp').mockImplementation(
        () => ({ var: { mdState: 'STARTED' } }) as unknown as SliceState
    );

    const { arrayIsRunning } = await import('@app/core/utils/array/array-is-running');
    expect(arrayIsRunning()).toBe(true);
    vi.spyOn(getters, 'emhttp').mockReset();
});

test('Returns false if the array is stopped', async () => {
    vi.spyOn(getters, 'emhttp').mockImplementation(
        () => ({ var: { mdState: 'Stopped' } }) as unknown as SliceState
    );
    const { arrayIsRunning } = await import('@app/core/utils/array/array-is-running');
    expect(arrayIsRunning()).toBe(false);
    vi.spyOn(getters, 'emhttp').mockReset();
});
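Note the ordering in the new file: each test installs the `emhttp` spy first and only then pulls in the module with a dynamic `await import(...)`, so `arrayIsRunning` sees the mocked state regardless of how the module caches it. The implementation itself is not part of this diff; a minimal sketch of what the assertions imply, assuming the helper simply compares `mdState` against the started constant:

```typescript
import { getters } from '@app/store';

// Hypothetical sketch: the tests only pin observable behaviour, i.e.
// mdState === 'STARTED' (exact casing) returns true, anything else false.
export const arrayIsRunning = (): boolean => {
    const { var: emhttpVar } = getters.emhttp();
    return emhttpVar.mdState === 'STARTED';
};
```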
@@ -3,8 +3,8 @@ import 'reflect-metadata';
 import { cloneDeep } from 'lodash-es';
 import { expect, test } from 'vitest';

-import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer.js';
-import { initialState } from '@app/store/modules/config.js';
+import { getWriteableConfig } from '@app/core/utils/files/config-file-normalizer';
+import { initialState } from '@app/store/modules/config';

 test('it creates a FLASH config with NO OPTIONAL values', () => {
     const basicConfig = initialState;
@@ -80,6 +80,7 @@ test('it creates a FLASH config with OPTIONAL values', () => {
     // 2fa & t2fa should be ignored
     basicConfig.remote['2Fa'] = 'yes';
     basicConfig.local['2Fa'] = 'yes';
+    basicConfig.local.showT2Fa = 'yes';

     basicConfig.api.extraOrigins = 'myextra.origins';
     basicConfig.remote.upnpEnabled = 'yes';
@@ -119,6 +120,7 @@ test('it creates a MEMORY config with OPTIONAL values', () => {
     // 2fa & t2fa should be ignored
     basicConfig.remote['2Fa'] = 'yes';
     basicConfig.local['2Fa'] = 'yes';
+    basicConfig.local.showT2Fa = 'yes';
     basicConfig.api.extraOrigins = 'myextra.origins';
     basicConfig.remote.upnpEnabled = 'yes';
     basicConfig.connectionStatus.upnpStatus = 'Turned On';
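Both config hunks add `basicConfig.local.showT2Fa = 'yes'` beside the existing `2Fa` flags that the comment says must be ignored, extending the coverage of `getWriteableConfig`'s key filtering. A sketch of the kind of filtering the tests imply; the key list and helper name are assumptions, not the real implementation:

```typescript
// Hypothetical: keys that must never be written back out to disk.
const IGNORED_LOCAL_KEYS = new Set(['2Fa', 'showT2Fa']);

function stripIgnoredKeys(local: Record<string, unknown>): Record<string, unknown> {
    return Object.fromEntries(
        Object.entries(local).filter(([key]) => !IGNORED_LOCAL_KEYS.has(key))
    );
}
```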
@@ -2,7 +2,7 @@ import { parse } from 'ini';
 import { Serializer } from 'multi-ini';
 import { expect, test } from 'vitest';

-import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer.js';
+import { safelySerializeObjectToIni } from '@app/core/utils/files/safe-ini-serializer';

 test('MultiIni breaks when serializing an object with a boolean inside', async () => {
     const objectToSerialize = {
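The test name records the underlying bug: multi-ini's `Serializer` misbehaves when a value is a boolean rather than a string, which is why the repo wraps it in `safelySerializeObjectToIni`. A sketch of the coercion idea; the real helper may differ:

```typescript
import { Serializer } from 'multi-ini';

// Hypothetical: recursively stringify primitives so the serializer only
// ever sees strings and nested objects.
function coerceValues(obj: Record<string, unknown>): Record<string, unknown> {
    return Object.fromEntries(
        Object.entries(obj).map(([key, value]) => [
            key,
            typeof value === 'object' && value !== null
                ? coerceValues(value as Record<string, unknown>)
                : String(value),
        ])
    );
}

export function safeSerialize(obj: Record<string, unknown>): string {
    return new Serializer().serialize(coerceValues(obj));
}
```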
@@ -1,11 +1,8 @@
 import { expect, test } from 'vitest';

-import {
-    getBannerPathIfPresent,
-    getCasePathIfPresent,
-} from '@app/core/utils/images/image-file-helpers.js';
-import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file.js';
-import { store } from '@app/store/index.js';
+import { getBannerPathIfPresent, getCasePathIfPresent } from '@app/core/utils/images/image-file-helpers';
+import { loadDynamixConfigFile } from '@app/store/actions/load-dynamix-config-file';
+import { store } from '@app/store/index';

 test('get case path returns expected result', async () => {
     await expect(getCasePathIfPresent()).resolves.toContain('/dev/dynamix/case-model.png');
@@ -1,3 +0,0 @@
import { test } from 'vitest';

test.todo('Sleeps atomically for n milliseconds');
Some files were not shown because too many files have changed in this diff.