Mirror of https://github.com/unraid/api.git (synced 2026-01-03 06:59:50 -06:00)

Compare commits: feat/cpu-s ... feat/flash (56 commits)
Commits (SHA1):
b663293f1b
4c0b967164
0d864fa948
984b8748ef
a406fdc5fe
e7066c0e09
872559ce56
f99264e73d
73ba3f074a
87fc83645f
f126c9568a
c273a3b7e7
92f3d6956e
90ed4b9de3
015c6e527b
5fcb8da50b
5b0862dd98
8da7c6e586
333093a20d
69359902cb
8befa23b4d
f0c26b777f
f29d4f5318
7f9f4c68ac
cebca3d6bf
25f57f90aa
50b80b9c07
69b8eb9060
d83d36c355
7c26b01be6
1d3800c164
9d4249950d
5e9d09e75c
64c71459be
8f8352090c
744f34fc7b
3ffde0272c
a5c7b9fdd3
db9b8c12b9
93d9530628
af5ffec13d
54b0bc0837
7be58908f0
00b1c1b0c7
d3adbafbff
dada8e63c5
f5e4607f70
68139cda2b
bf3b95bfe5
35a6d14367
0be56f148d
4c9e0044e5
242697c8d8
f93c850b95
8df0ca58b5
d31d86dc7d
@@ -14,7 +14,17 @@
         "Bash(mv:*)",
         "Bash(ls:*)",
         "mcp__ide__getDiagnostics",
-        "Bash(pnpm --filter \"*connect*\" test connect-status-writer.service.spec)"
+        "Bash(pnpm --filter \"*connect*\" test connect-status-writer.service.spec)",
+        "Bash(pnpm add:*)",
+        "Bash(npx tsc:*)",
+        "Bash(pnpm list:*)",
+        "Bash(rm:*)",
+        "Bash(pnpm --filter ./api test)",
+        "Bash(pnpm i:*)",
+        "Bash(pnpm:*)",
+        "Bash(corepack prepare:*)",
+        "Bash(nvm:*)",
+        "Bash(git config:*)"
     ]
   },
   "enableAllProjectMcpServers": false
@@ -15,6 +15,7 @@ PATHS_ACTIVATION_BASE=./dev/activation
 PATHS_PASSWD=./dev/passwd
 PATHS_RCLONE_SOCKET=./dev/rclone-socket
 PATHS_LOG_BASE=./dev/log # Where we store logs
+PATHS_BACKUP_JOBS=./dev/api/backup
 ENVIRONMENT="development"
 NODE_ENV="development"
 PORT="3001"
@@ -26,4 +27,4 @@ BYPASS_PERMISSION_CHECKS=false
 BYPASS_CORS_CHECKS=true
 CHOKIDAR_USEPOLLING=true
 LOG_TRANSPORT=console
-LOG_LEVEL=trace
+LOG_LEVEL=debug # Change to trace for extremely noisy logging
@@ -53,5 +53,5 @@ export default tseslint.config(eslint.configs.recommended, ...tseslint.configs.r
         'eol-last': ['error', 'always'],
     },
 
-    ignores: ['src/graphql/generated/client/**/*'],
+    ignores: ['src/graphql/generated/client/**/*', 'scripts/**/*'],
 });
api/dev/api/backup/backup-jobs.json (new file, 25 lines)
@@ -0,0 +1,25 @@
[
  {
    "id": "a68667b6-f4ef-4c47-aec3-d9886be78487",
    "name": "Test",
    "sourceType": "RAW",
    "destinationType": "RCLONE",
    "schedule": "0 2 * * *",
    "enabled": true,
    "sourceConfig": {
      "label": "Raw file backup",
      "sourcePath": "/Users/elibosley/Desktop",
      "excludePatterns": [],
      "includePatterns": []
    },
    "destinationConfig": {
      "type": "RCLONE",
      "remoteName": "google_drives",
      "destinationPath": "desktop"
    },
    "createdAt": "2025-05-27T15:02:31.655Z",
    "updatedAt": "2025-05-27T15:11:40.547Z",
    "lastRunAt": "2025-05-27T15:07:37.139Z",
    "lastRunStatus": "Failed: RClone group backup-job_1748358397105_sbo5j322k failed or timed out."
  }
]
@@ -1,10 +1,12 @@
 {
-  "version": "4.8.0",
+  "version": "4.9.5",
   "extraOrigins": [
     "https://google.com",
     "https://test.com"
   ],
   "sandbox": true,
   "ssoSubIds": [],
-  "plugins": ["unraid-api-plugin-connect"]
+  "plugins": [
+    "unraid-api-plugin-connect"
+  ]
 }
@@ -1,5 +1,5 @@
 [api]
-version="4.4.1"
+version="4.8.0"
 extraOrigins="https://google.com,https://test.com"
 [local]
 sandbox="yes"
@@ -598,6 +598,7 @@ enum Resource {
   ACTIVATION_CODE
   API_KEY
   ARRAY
+  BACKUP
   CLOUD
   CONFIG
   CONNECT
@@ -653,6 +654,63 @@ type ApiKeyWithSecret implements Node {
   key: String!
 }
 
+type JobStatus implements Node {
+  id: PrefixedID!
+
+  """External job ID from the job execution system"""
+  externalJobId: String!
+  name: String!
+  status: BackupJobStatus!
+
+  """Progress percentage (0-100)"""
+  progress: Int!
+  message: String
+  error: String
+  startTime: DateTime!
+  endTime: DateTime
+
+  """Bytes transferred"""
+  bytesTransferred: Int
+
+  """Total bytes to transfer"""
+  totalBytes: Int
+
+  """Transfer speed in bytes per second"""
+  speed: Int
+
+  """Elapsed time in seconds"""
+  elapsedTime: Int
+
+  """Estimated time to completion in seconds"""
+  eta: Int
+
+  """Human-readable bytes transferred"""
+  formattedBytesTransferred: String
+
+  """Human-readable transfer speed"""
+  formattedSpeed: String
+
+  """Human-readable elapsed time"""
+  formattedElapsedTime: String
+
+  """Human-readable ETA"""
+  formattedEta: String
+}
+
+"""Status of a backup job"""
+enum BackupJobStatus {
+  QUEUED
+  RUNNING
+  COMPLETED
+  FAILED
+  CANCELLED
+}
+
+"""
+A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format.
+"""
+scalar DateTime
+
 type RCloneDrive {
   """Provider name"""
   name: String!
@@ -693,6 +751,98 @@ type RCloneRemote {
   config: JSON!
 }
 
+type RCloneJobStats {
+  """Bytes transferred"""
+  bytes: Float
+
+  """Transfer speed in bytes/sec"""
+  speed: Float
+
+  """Estimated time to completion in seconds"""
+  eta: Float
+
+  """Elapsed time in seconds"""
+  elapsedTime: Float
+
+  """Progress percentage (0-100)"""
+  percentage: Float
+
+  """Number of checks completed"""
+  checks: Float
+
+  """Number of deletes completed"""
+  deletes: Float
+
+  """Number of errors encountered"""
+  errors: Float
+
+  """Whether a fatal error occurred"""
+  fatalError: Boolean
+
+  """Last error message"""
+  lastError: String
+
+  """Number of renames completed"""
+  renames: Float
+
+  """Whether there is a retry error"""
+  retryError: Boolean
+
+  """Number of server-side copies"""
+  serverSideCopies: Float
+
+  """Bytes in server-side copies"""
+  serverSideCopyBytes: Float
+
+  """Number of server-side moves"""
+  serverSideMoves: Float
+
+  """Bytes in server-side moves"""
+  serverSideMoveBytes: Float
+
+  """Total bytes to transfer"""
+  totalBytes: Float
+
+  """Total checks to perform"""
+  totalChecks: Float
+
+  """Total transfers to perform"""
+  totalTransfers: Float
+
+  """Time spent transferring in seconds"""
+  transferTime: Float
+
+  """Number of transfers completed"""
+  transfers: Float
+
+  """Currently transferring files"""
+  transferring: JSON
+
+  """Currently checking files"""
+  checking: JSON
+
+  """Human-readable bytes transferred"""
+  formattedBytes: String
+
+  """Human-readable transfer speed"""
+  formattedSpeed: String
+
+  """Human-readable elapsed time"""
+  formattedElapsedTime: String
+
+  """Human-readable ETA"""
+  formattedEta: String
+
+  """Calculated percentage (fallback when percentage is null)"""
+  calculatedPercentage: Float
+
+  """Whether the job is actively running"""
+  isActivelyRunning: Boolean
+
+  """Whether the job is completed"""
+  isCompleted: Boolean
+}
+
 type ArrayMutations {
   """Set array state"""
   setState(input: ArrayStateInput!): UnraidArray!
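The `calculatedPercentage` field above is documented as a fallback for when rclone's reported `percentage` is null. A minimal sketch of such a fallback, assuming it derives from `bytes` and `totalBytes` (the resolver's actual derivation is not shown in this diff):

// Hypothetical helper illustrating the documented fallback; not the
// project's actual resolver code.
function calculatedPercentage(stats: {
    percentage?: number | null;
    bytes?: number | null;
    totalBytes?: number | null;
}): number | null {
    if (stats.percentage != null) return stats.percentage; // prefer rclone's own figure
    if (stats.bytes != null && stats.totalBytes) {
        return Math.min(100, (stats.bytes / stats.totalBytes) * 100); // derive from byte counts
    }
    return null; // nothing to derive from
}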
@@ -764,6 +914,186 @@ type VmMutations {
   reset(id: PrefixedID!): Boolean!
 }
 
+"""Backup related mutations"""
+type BackupMutations {
+  """Create a new backup job configuration"""
+  createBackupJobConfig(input: CreateBackupJobConfigInput!): BackupJobConfig!
+
+  """Update a backup job configuration"""
+  updateBackupJobConfig(id: PrefixedID!, input: UpdateBackupJobConfigInput!): BackupJobConfig
+
+  """Delete a backup job configuration"""
+  deleteBackupJobConfig(id: PrefixedID!): Boolean!
+
+  """Initiates a backup using a configured remote."""
+  initiateBackup(input: InitiateBackupInput!): BackupStatus!
+
+  """Toggle a backup job configuration enabled/disabled"""
+  toggleJobConfig(id: PrefixedID!): BackupJobConfig
+
+  """Manually trigger a backup job using existing configuration"""
+  triggerJob(id: PrefixedID!): BackupStatus!
+
+  """Stop all running backup jobs"""
+  stopAllBackupJobs: BackupStatus!
+
+  """Stop a specific backup job"""
+  stopBackupJob(id: PrefixedID!): BackupStatus!
+
+  """Forget all finished backup jobs to clean up the job list"""
+  forgetFinishedBackupJobs: BackupStatus!
+}
+
+input CreateBackupJobConfigInput {
+  name: String!
+  schedule: String
+  enabled: Boolean! = true
+
+  """Source configuration for this backup job"""
+  sourceConfig: SourceConfigInput
+
+  """Destination configuration for this backup job"""
+  destinationConfig: DestinationConfigInput
+}
+
+input SourceConfigInput {
+  type: SourceType!
+
+  """Timeout for backup operation in seconds"""
+  timeout: Float! = 3600
+
+  """Whether to cleanup on failure"""
+  cleanupOnFailure: Boolean! = true
+  zfsConfig: ZfsPreprocessConfigInput
+  flashConfig: FlashPreprocessConfigInput
+  scriptConfig: ScriptPreprocessConfigInput
+  rawConfig: RawBackupConfigInput
+}
+
+"""
+Type of backup to perform (ZFS snapshot, Flash backup, Custom script, or Raw file backup)
+"""
+enum SourceType {
+  ZFS
+  FLASH
+  SCRIPT
+  RAW
+}
+
+input ZfsPreprocessConfigInput {
+  """Human-readable label for this source configuration"""
+  label: String
+
+  """ZFS pool name"""
+  poolName: String!
+
+  """Dataset name within the pool"""
+  datasetName: String!
+
+  """Snapshot name prefix"""
+  snapshotPrefix: String
+
+  """Whether to cleanup snapshots after backup"""
+  cleanupSnapshots: Boolean! = true
+
+  """Number of snapshots to retain"""
+  retainSnapshots: Float
+}
+
+input FlashPreprocessConfigInput {
+  """Human-readable label for this source configuration"""
+  label: String
+
+  """Flash drive mount path"""
+  flashPath: String! = "/boot"
+
+  """Whether to include git history"""
+  includeGitHistory: Boolean! = true
+
+  """Additional paths to include in backup"""
+  additionalPaths: [String!]
+}
+
+input ScriptPreprocessConfigInput {
+  """Human-readable label for this source configuration"""
+  label: String
+
+  """Path to the script file"""
+  scriptPath: String!
+
+  """Arguments to pass to the script"""
+  scriptArgs: [String!]
+
+  """Working directory for script execution"""
+  workingDirectory: String
+
+  """Environment variables for script execution"""
+  environment: JSON
+
+  """Output file path where script should write data"""
+  outputPath: String!
+}
+
+input RawBackupConfigInput {
+  """Human-readable label for this source configuration"""
+  label: String
+
+  """Source path to backup"""
+  sourcePath: String!
+
+  """File patterns to exclude from backup"""
+  excludePatterns: [String!]
+
+  """File patterns to include in backup"""
+  includePatterns: [String!]
+}
+
+input DestinationConfigInput {
+  type: DestinationType!
+  rcloneConfig: RcloneDestinationConfigInput
+}
+
+enum DestinationType {
+  RCLONE
+}
+
+input RcloneDestinationConfigInput {
+  remoteName: String!
+  destinationPath: String!
+  rcloneOptions: JSON
+}
+
+input UpdateBackupJobConfigInput {
+  name: String
+  schedule: String
+  enabled: Boolean
+
+  """Source configuration for this backup job"""
+  sourceConfig: SourceConfigInput
+
+  """Destination configuration for this backup job"""
+  destinationConfig: DestinationConfigInput
+  lastRunStatus: String
+  lastRunAt: String
+  currentJobId: String
+}
+
+input InitiateBackupInput {
+  """The name of the remote configuration to use for the backup."""
+  remoteName: String!
+
+  """Source path to backup."""
+  sourcePath: String!
+
+  """Destination path on the remote."""
+  destinationPath: String!
+
+  """
+  Additional options for the backup operation, such as --dry-run or --transfers.
+  """
+  options: JSON
+}
+
 """API Key related mutations"""
 type ApiKeyMutations {
   """Create an API key"""
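`SourceConfigInput` above is a discriminated input: `type` selects which optional sub-config must be supplied, and the service-side validation for that pairing appears in `BackupConfigService.transformSourceConfigInput` later in this diff. A sketch of a well-formed `createBackupJobConfig` input for a ZFS source, written as a TypeScript variables object (field names follow the schema above; the pool, dataset, and path values are illustrative only):

const createZfsJobVariables = {
    input: {
        name: 'Nightly appdata snapshot',
        schedule: '0 2 * * *', // cron syntax: daily at 02:00
        enabled: true,
        sourceConfig: {
            type: 'ZFS', // discriminator: makes zfsConfig mandatory
            timeout: 3600,
            cleanupOnFailure: true,
            zfsConfig: {
                poolName: 'tank', // illustrative
                datasetName: 'appdata', // illustrative
                cleanupSnapshots: true,
            },
        },
        destinationConfig: {
            type: 'RCLONE', // discriminator: makes rcloneConfig mandatory
            rcloneConfig: {
                remoteName: 'google_drives', // matches the sample backup-jobs.json above
                destinationPath: 'backups/appdata', // illustrative
            },
        },
    },
};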
@@ -886,10 +1216,125 @@ type ParityCheck {
   running: Boolean
 }
 
+type FlashPreprocessConfig {
+  label: String!
+  flashPath: String!
+  includeGitHistory: Boolean!
+  additionalPaths: [String!]
+}
+
+type RawBackupConfig {
+  label: String!
+  sourcePath: String!
+  excludePatterns: [String!]
+  includePatterns: [String!]
+}
+
+type ScriptPreprocessConfig {
+  label: String!
+  scriptPath: String!
+  scriptArgs: [String!]
+  workingDirectory: String
+  environment: JSON
+  outputPath: String!
+}
+
+type ZfsPreprocessConfig {
+  label: String!
+  poolName: String!
+  datasetName: String!
+  snapshotPrefix: String
+  cleanupSnapshots: Boolean!
+  retainSnapshots: Float
+}
+
+type Backup implements Node {
+  id: PrefixedID!
+  jobs: [JobStatus!]!
+  configs: [BackupJobConfig!]!
+
+  """Get the status for the backup service"""
+  status: BackupStatus!
+}
+
+type BackupStatus {
+  """Status message indicating the outcome of the backup initiation."""
+  status: String!
+
+  """Job ID if available, can be used to check job status."""
+  jobId: String
+}
+
+type BackupJobConfig implements Node {
+  id: PrefixedID!
+
+  """Human-readable name for this backup job"""
+  name: String!
+
+  """Type of the backup source"""
+  sourceType: SourceType!
+
+  """Type of the backup destination"""
+  destinationType: DestinationType!
+
+  """Cron schedule expression (e.g., "0 2 * * *" for daily at 2AM)"""
+  schedule: String!
+
+  """Whether this backup job is enabled"""
+  enabled: Boolean!
+
+  """Source configuration for this backup job"""
+  sourceConfig: SourceConfigUnion!
+
+  """Destination configuration for this backup job"""
+  destinationConfig: DestinationConfigUnion!
+
+  """When this config was created"""
+  createdAt: DateTimeISO!
+
+  """When this config was last updated"""
+  updatedAt: DateTimeISO!
+
+  """Last time this job ran"""
+  lastRunAt: DateTimeISO
+
+  """Status of last run"""
+  lastRunStatus: String
+
+  """Current running job ID for this config"""
+  currentJobId: String
+
+  """Get the current running job for this backup config"""
+  currentJob: JobStatus
+}
+
+union SourceConfigUnion = ZfsPreprocessConfig | FlashPreprocessConfig | ScriptPreprocessConfig | RawBackupConfig
+
+union DestinationConfigUnion = RcloneDestinationConfig
+
+type RcloneDestinationConfig {
+  type: String!
+
+  """Remote name from rclone config"""
+  remoteName: String!
+
+  """Destination path on the remote"""
+  destinationPath: String!
+
+  """RClone options (e.g., --transfers, --checkers)"""
+  rcloneOptions: JSON
+}
+
 """
-A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format.
+A date-time string at UTC, such as 2007-12-03T10:15:30Z, compliant with the `date-time` format outlined in section 5.6 of the RFC 3339 profile of the ISO 8601 standard for representation of dates and times using the Gregorian calendar.This scalar is serialized to a string in ISO 8601 format and parsed from a string in ISO 8601 format.
 """
-scalar DateTime
+scalar DateTimeISO
+
+type BackupJobConfigForm {
+  id: PrefixedID!
+  dataSchema: JSON!
+  uiSchema: JSON!
+}
 
 type Config implements Node {
   id: PrefixedID!
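Since `sourceConfig` and `destinationConfig` are exposed through unions, clients read the per-type fields with inline fragments. A minimal sketch (a GraphQL document held in a TypeScript string; the selections follow the types above, and it assumes the `backup` root query field added further down in this diff):

const BACKUP_CONFIGS_QUERY = /* GraphQL */ `
    query BackupConfigs {
        backup {
            configs {
                id
                name
                sourceType
                sourceConfig {
                    ... on ZfsPreprocessConfig { poolName datasetName }
                    ... on FlashPreprocessConfig { flashPath includeGitHistory }
                    ... on ScriptPreprocessConfig { scriptPath outputPath }
                    ... on RawBackupConfig { sourcePath excludePatterns }
                }
                destinationConfig {
                    ... on RcloneDestinationConfig { remoteName destinationPath }
                }
            }
        }
    }
`;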
@@ -1248,14 +1693,6 @@ type Docker implements Node {
   networks(skipCache: Boolean! = false): [DockerNetwork!]!
 }
 
-type FlashBackupStatus {
-  """Status message indicating the outcome of the backup initiation."""
-  status: String!
-
-  """Job ID if available, can be used to check job status."""
-  jobId: String
-}
-
 type Flash implements Node {
   id: PrefixedID!
   guid: String!
@@ -1658,13 +2095,27 @@ type Query {
   vms: Vms!
   parityHistory: [ParityCheck!]!
   array: UnraidArray!
+
+  """Get backup service information"""
+  backup: Backup!
+
+  """Get a specific backup job configuration"""
+  backupJobConfig(id: PrefixedID!): BackupJobConfig
+
+  """Get status of a specific backup job"""
+  backupJob(id: PrefixedID!): JobStatus
+
+  """Get the JSON schema for backup job configuration form"""
+  backupJobConfigForm(input: BackupJobConfigFormInput): BackupJobConfigForm!
+  backupJobStatus(jobId: PrefixedID!): JobStatus
+  allBackupJobStatuses: [JobStatus!]!
+  rclone: RCloneBackupSettings!
   customization: Customization
   publicPartnerInfo: PublicPartnerInfo
   publicTheme: Theme!
   docker: Docker!
   disks: [Disk!]!
   disk(id: PrefixedID!): Disk!
-  rclone: RCloneBackupSettings!
   settings: Settings!
   isSSOEnabled: Boolean!
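With the root fields above, a client can poll a running job for progress. A sketch using Node 18+'s built-in fetch; the /graphql endpoint path and the dev port 3001 (from the .env hunk earlier) are assumptions, and authentication is omitted:

const JOB_STATUS_QUERY = /* GraphQL */ `
    query BackupJob($jobId: PrefixedID!) {
        backupJobStatus(jobId: $jobId) {
            id
            status
            progress
            formattedSpeed
            formattedEta
        }
    }
`;

async function pollBackupJob(jobId: string) {
    const res = await fetch('http://localhost:3001/graphql', {
        method: 'POST',
        headers: { 'content-type': 'application/json' },
        body: JSON.stringify({ query: JOB_STATUS_QUERY, variables: { jobId } }),
    });
    const { data } = await res.json();
    return data?.backupJobStatus ?? null; // null if the job is unknown or already forgotten
}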
@@ -1676,6 +2127,10 @@ type Query {
   cloud: Cloud!
 }
 
+input BackupJobConfigFormInput {
+  showAdvanced: Boolean! = false
+}
+
 type Mutation {
   """Creates a new notification record"""
   createNotification(input: NotificationData!): Notification!
@@ -1699,12 +2154,10 @@ type Mutation {
   array: ArrayMutations!
   docker: DockerMutations!
   vm: VmMutations!
+  backup: BackupMutations!
   parityCheck: ParityCheckMutations!
   apiKey: ApiKeyMutations!
   rclone: RCloneMutations!
-
-  """Initiates a flash drive backup using a configured remote."""
-  initiateFlashBackup(input: InitiateFlashBackupInput!): FlashBackupStatus!
   updateSettings(input: JSON!): UpdateSettingsResponse!
 
   """
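Backup mutations now hang off `Mutation.backup`, so triggering a configured job is a nested selection rather than a root field (contrast the removed root-level `initiateFlashBackup` above and the input type dropped below). A minimal sketch:

const TRIGGER_JOB_MUTATION = /* GraphQL */ `
    mutation TriggerBackup($id: PrefixedID!) {
        backup {
            triggerJob(id: $id) {
                status
                jobId
            }
        }
    }
`;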
@@ -1731,22 +2184,6 @@ input NotificationData {
   link: String
 }
 
-input InitiateFlashBackupInput {
-  """The name of the remote configuration to use for the backup."""
-  remoteName: String!
-
-  """Source path to backup (typically the flash drive)."""
-  sourcePath: String!
-
-  """Destination path on the remote."""
-  destinationPath: String!
-
-  """
-  Additional options for the backup operation, such as --dry-run or --transfers.
-  """
-  options: JSON
-}
-
 input PluginManagementInput {
   """Array of plugin package names to add or remove"""
   names: [String!]!
@@ -94,7 +94,7 @@
     "command-exists": "1.2.9",
     "convert": "5.12.0",
     "cookie": "1.0.2",
-    "cron": "4.3.1",
+    "cron": "4.3.0",
     "cross-fetch": "4.1.0",
     "diff": "8.0.2",
     "dockerode": "4.0.7",
@@ -192,6 +192,7 @@
     "@types/wtfnode": "0.7.3",
     "@vitest/coverage-v8": "3.2.4",
     "@vitest/ui": "3.2.4",
+    "commit-and-tag-version": "9.6.0",
     "cz-conventional-changelog": "3.3.0",
    "eslint": "9.30.1",
     "eslint-plugin-import": "2.32.0",
@@ -203,7 +204,6 @@
     "nodemon": "3.1.10",
     "prettier": "3.6.2",
     "rollup-plugin-node-externals": "8.0.1",
-    "commit-and-tag-version": "9.6.0",
     "tsx": "4.20.3",
     "type-fest": "4.41.0",
     "typescript": "5.8.3",
@@ -225,7 +225,8 @@
       "nest-authz": {
         "@nestjs/common": "$@nestjs/common",
         "@nestjs/core": "$@nestjs/core"
-      }
+      },
+      "cron": "4.3.1"
     },
   "private": true,
   "packageManager": "pnpm@10.13.1"
@@ -11,6 +11,7 @@ import {
     RCloneStartBackupInput,
     UpdateRCloneRemoteDto,
 } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
+import { FormatService } from '@app/unraid-api/utils/format.service.js';
 
 vi.mock('got');
 vi.mock('execa');
@@ -55,6 +56,8 @@ describe('RCloneApiService', () => {
     let mockExeca: any;
     let mockPRetry: any;
     let mockExistsSync: any;
+    let mockFormatService: FormatService;
+    let mockCacheManager: any;
 
     beforeEach(async () => {
         vi.clearAllMocks();
@@ -69,18 +72,67 @@ describe('RCloneApiService', () => {
         mockPRetry = vi.mocked(pRetry.default);
         mockExistsSync = vi.mocked(existsSync);
 
-        mockGot.post = vi.fn().mockResolvedValue({ body: {} });
-        mockExeca.mockReturnValue({
-            on: vi.fn(),
-            kill: vi.fn(),
-            killed: false,
-            pid: 12345,
-        } as any);
+        mockGot.post = vi.fn().mockImplementation((url: string) => {
+            // Mock the core/pid call to indicate socket is running
+            if (url.includes('core/pid')) {
+                return Promise.resolve({ body: { pid: 12345 } });
+            }
+            return Promise.resolve({ body: {} });
+        });
+        // Mock execa to return a resolved promise for rclone version check
+        mockExeca.mockImplementation((cmd: string, args: string[]) => {
+            if (cmd === 'rclone' && args[0] === 'version') {
+                return Promise.resolve({ stdout: 'rclone v1.67.0', stderr: '', exitCode: 0 } as any);
+            }
+            return {
+                on: vi.fn(),
+                kill: vi.fn(),
+                killed: false,
+                pid: 12345,
+            } as any;
+        });
         mockPRetry.mockResolvedValue(undefined);
-        mockExistsSync.mockReturnValue(false);
+        // Mock socket exists
+        mockExistsSync.mockReturnValue(true);
 
-        service = new RCloneApiService();
-        await service.onModuleInit();
+        mockFormatService = {
+            formatBytes: vi.fn(),
+            formatDuration: vi.fn(),
+        } as any;
+
+        // Mock RCloneStatusService
+        const mockStatusService = {
+            enhanceStatsWithFormattedFields: vi.fn(),
+            transformStatsToJob: vi.fn(),
+            calculateCombinedStats: vi.fn(),
+            parseActiveJobs: vi.fn(),
+            parseBackupStatus: vi.fn(),
+            parseJobWithStats: vi.fn(),
+            parseAllJobsWithStats: vi.fn(),
+            parseJobsWithStats: vi.fn(),
+            getBackupStatus: vi.fn(),
+        } as any;
+
+        // Mock StreamingJobManager
+        const mockStreamingJobManager = {
+            startJob: vi.fn(),
+            stopJob: vi.fn(),
+            getJobStatus: vi.fn(),
+            getAllJobs: vi.fn(),
+        } as any;
+
+        // Mock cache manager
+        mockCacheManager = {
+            get: vi.fn().mockResolvedValue(null),
+            set: vi.fn().mockResolvedValue(undefined),
+            del: vi.fn().mockResolvedValue(undefined),
+        };
+
+        service = new RCloneApiService(mockStatusService);
+        // Mock the service as initialized without actually running onModuleInit
+        // to avoid the initialization API calls
+        (service as any).initialized = true;
+        (service as any).rcloneBaseUrl = 'http://unix:/tmp/rclone.sock:';
     });
 
     describe('getProviders', () => {
@@ -248,6 +300,9 @@ describe('RCloneApiService', () => {
                 options: { delete_on: 'dst' },
             };
             const mockResponse = { jobid: 'job-123' };
+
+            // Clear previous mock calls and set up fresh mock
+            mockGot.post.mockClear();
             mockGot.post.mockResolvedValue({ body: mockResponse });
 
             const result = await service.startBackup(input);
@@ -256,11 +311,11 @@ describe('RCloneApiService', () => {
             expect(mockGot.post).toHaveBeenCalledWith(
                 'http://unix:/tmp/rclone.sock:/sync/copy',
                 expect.objectContaining({
-                    json: {
+                    json: expect.objectContaining({
                         srcFs: '/source/path',
                         dstFs: 'remote:backup/path',
                         delete_on: 'dst',
-                    },
+                    }),
                 })
             );
         });
@@ -269,8 +324,22 @@ describe('RCloneApiService', () => {
     describe('getJobStatus', () => {
         it('should return job status', async () => {
             const input: GetRCloneJobStatusDto = { jobId: 'job-123' };
-            const mockStatus = { status: 'running', progress: 0.5 };
-            mockGot.post.mockResolvedValue({ body: mockStatus });
+            const mockStatus = { id: 'job-123', status: 'running', progress: 0.5 };
+            mockGot.post.mockImplementation((url: string) => {
+                if (url.includes('core/stats')) {
+                    return Promise.resolve({ body: {} });
+                }
+                if (url.includes('job/status')) {
+                    return Promise.resolve({ body: mockStatus });
+                }
+                return Promise.resolve({ body: {} });
+            });
+
+            // Mock the status service methods
+            const mockStatusService = (service as any).statusService;
+            mockStatusService.enhanceStatsWithFormattedFields = vi.fn().mockReturnValue({});
+            mockStatusService.transformStatsToJob = vi.fn().mockReturnValue(null);
+            mockStatusService.parseJobWithStats = vi.fn().mockReturnValue(mockStatus);
 
             const result = await service.getJobStatus(input);
@@ -335,7 +404,7 @@ describe('RCloneApiService', () => {
             mockGot.post.mockRejectedValue(httpError);
 
             await expect(service.getProviders()).rejects.toThrow(
-                'Rclone API Error (config/providers, HTTP 404): Failed to process error response body. Raw body:'
+                'Rclone API Error (config/providers, HTTP 404): Failed to process error response: '
             );
         });
@@ -352,7 +421,7 @@ describe('RCloneApiService', () => {
             mockGot.post.mockRejectedValue(httpError);
 
             await expect(service.getProviders()).rejects.toThrow(
-                'Rclone API Error (config/providers, HTTP 400): Failed to process error response body. Raw body: invalid json'
+                'Rclone API Error (config/providers, HTTP 400): Failed to process error response: invalid json'
             );
         });
@@ -367,7 +436,7 @@ describe('RCloneApiService', () => {
            mockGot.post.mockRejectedValue('unknown error');
 
             await expect(service.getProviders()).rejects.toThrow(
-                'Unknown error calling RClone API (config/providers) with params {}: unknown error'
+                'Unknown error calling RClone API (config/providers): unknown error'
             );
         });
     });
@@ -31,6 +31,7 @@ exports[`Returns paths 1`] = `
   "activationBase",
   "webGuiBase",
   "identConfig",
+  "backupBase",
   "activation",
   "boot",
   "webgui",
@@ -71,6 +71,7 @@ const initialState = {
     ),
     webGuiBase: '/usr/local/emhttp/webGui' as const,
     identConfig: resolvePath(process.env.PATHS_IDENT_CONFIG ?? ('/boot/config/ident.cfg' as const)),
+    backupBase: resolvePath(process.env.PATHS_BACKUP_JOBS ?? ('/boot/config/api/backup/' as const)),
 };
 
 // Derive asset paths from base paths
@@ -75,7 +75,7 @@ export class AuthService {
 
         // Now get the updated roles
         const existingRoles = await this.authzService.getRolesForUser(user.id);
-        this.logger.debug(`User ${user.id} has roles: ${existingRoles}`);
+        this.logger.verbose(`User ${user.id} has roles: ${existingRoles}`);
 
         return user;
     } catch (error: unknown) {
@@ -213,7 +213,7 @@ export class AuthService {
             ...rolesToRemove.map((role) => this.authzService.deleteRoleForUser(userId, role)),
         ]);
 
-        this.logger.debug(
+        this.logger.verbose(
             `Synced roles for user ${userId}. Added: ${rolesToAdd.join(
                 ','
             )}, Removed: ${rolesToRemove.join(',')}`
@@ -234,7 +234,6 @@ export class AuthService {
      * @returns a service account that represents the user session (i.e. a webgui user).
      */
     async getSessionUser(): Promise<UserAccount> {
-        this.logger.debug('getSessionUser called!');
         return {
             id: '-1',
             description: 'Session receives administrator permissions',
@@ -0,0 +1,600 @@
import { forwardRef, Inject, Injectable, Logger, OnModuleInit } from '@nestjs/common';
import { SchedulerRegistry } from '@nestjs/schedule';
import { existsSync } from 'fs';
import { readFile, writeFile } from 'fs/promises';
import { join } from 'path';

import { CronJob } from 'cron';
import { v4 as uuidv4 } from 'uuid';

import { getters } from '@app/store/index.js';
import {
    BackupJobConfig,
    CreateBackupJobConfigInput,
    UpdateBackupJobConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import { getBackupJobGroupId } from '@app/unraid-api/graph/resolvers/backup/backup.utils.js';
import {
    DestinationConfigInput,
    DestinationType,
    RcloneDestinationConfig,
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { BackupOrchestrationService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-orchestration.service.js';
import {
    FlashPreprocessConfig,
    RawBackupConfig,
    ScriptPreprocessConfig,
    SourceConfigInput,
    SourceType,
    ZfsPreprocessConfig,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';

const JOB_GROUP_PREFIX = 'backup-';

@Injectable()
export class BackupConfigService implements OnModuleInit {
    private readonly logger = new Logger(BackupConfigService.name);
    private readonly configPath: string;
    private configs: Map<string, BackupJobConfig> = new Map();

    constructor(
        private readonly rcloneService: RCloneService,
        private readonly schedulerRegistry: SchedulerRegistry,
        @Inject(forwardRef(() => BackupOrchestrationService))
        private readonly backupOrchestrationService: BackupOrchestrationService
    ) {
        const paths = getters.paths();
        this.configPath = join(paths.backupBase, 'backup-jobs.json');
    }

    async onModuleInit(): Promise<void> {
        await this.loadConfigs();
    }

    private transformSourceConfigInput(
        input: SourceConfigInput
    ): ZfsPreprocessConfig | FlashPreprocessConfig | ScriptPreprocessConfig | RawBackupConfig {
        switch (input.type) {
            case SourceType.ZFS: {
                if (!input.zfsConfig) {
                    throw new Error('ZFS configuration is required when type is ZFS');
                }
                const zfsConfig = new ZfsPreprocessConfig();
                zfsConfig.label = input.zfsConfig.label || 'ZFS backup';
                zfsConfig.poolName = input.zfsConfig.poolName;
                zfsConfig.datasetName = input.zfsConfig.datasetName;
                zfsConfig.snapshotPrefix = input.zfsConfig.snapshotPrefix;
                zfsConfig.cleanupSnapshots = input.zfsConfig.cleanupSnapshots ?? true;
                zfsConfig.retainSnapshots = input.zfsConfig.retainSnapshots;
                return zfsConfig;
            }

            case SourceType.FLASH: {
                if (!input.flashConfig) {
                    throw new Error('Flash configuration is required when type is FLASH');
                }
                const flashConfig = new FlashPreprocessConfig();
                flashConfig.label = input.flashConfig.label || 'Flash drive backup';
                flashConfig.flashPath = input.flashConfig.flashPath || '/boot';
                flashConfig.includeGitHistory = input.flashConfig.includeGitHistory ?? true;
                flashConfig.additionalPaths = input.flashConfig.additionalPaths || [];
                return flashConfig;
            }

            case SourceType.SCRIPT: {
                if (!input.scriptConfig) {
                    throw new Error('Script configuration is required when type is SCRIPT');
                }
                const scriptConfig = new ScriptPreprocessConfig();
                scriptConfig.label = input.scriptConfig.label || 'Script backup';
                scriptConfig.scriptPath = input.scriptConfig.scriptPath;
                scriptConfig.scriptArgs = input.scriptConfig.scriptArgs || [];
                scriptConfig.workingDirectory = input.scriptConfig.workingDirectory;
                scriptConfig.environment = input.scriptConfig.environment;
                scriptConfig.outputPath = input.scriptConfig.outputPath;
                return scriptConfig;
            }

            case SourceType.RAW: {
                if (!input.rawConfig) {
                    throw new Error('Raw configuration is required when type is RAW');
                }
                const rawConfig = new RawBackupConfig();
                rawConfig.label = input.rawConfig.label || 'Raw file backup';
                rawConfig.sourcePath = input.rawConfig.sourcePath;
                rawConfig.excludePatterns = input.rawConfig.excludePatterns || [];
                rawConfig.includePatterns = input.rawConfig.includePatterns || [];
                return rawConfig;
            }

            default:
                throw new Error(`Unsupported source type: ${input.type}`);
        }
    }

    private transformDestinationConfigInput(input: DestinationConfigInput): RcloneDestinationConfig {
        switch (input.type) {
            case DestinationType.RCLONE: {
                if (!input.rcloneConfig) {
                    throw new Error('RClone configuration is required when type is RCLONE');
                }
                const rcloneConfig = new RcloneDestinationConfig();
                rcloneConfig.type = 'RCLONE';
                rcloneConfig.remoteName = input.rcloneConfig.remoteName;
                rcloneConfig.destinationPath = input.rcloneConfig.destinationPath;
                rcloneConfig.rcloneOptions = input.rcloneConfig.rcloneOptions;
                return rcloneConfig;
            }

            default:
                throw new Error(`Unsupported destination type: ${input.type}`);
        }
    }

    async createBackupJobConfig(input: CreateBackupJobConfigInput): Promise<BackupJobConfig> {
        const id = uuidv4();
        const now = new Date().toISOString();

        // Validate input sourceConfig and destinationConfig presence
        if (!input.sourceConfig) {
            this.logger.error('Source configuration (sourceConfig) is required.');
            throw new Error('Source configuration (sourceConfig) is required.');
        }
        if (!input.destinationConfig) {
            this.logger.error('Destination configuration (destinationConfig) is required.');
            throw new Error('Destination configuration (destinationConfig) is required.');
        }

        // Extract sourceType and destinationType from the respective config objects
        const sourceType = input.sourceConfig.type;
        const destinationType = input.destinationConfig.type;

        if (!sourceType) {
            this.logger.error("Source configuration must include a valid 'type' property.");
            throw new Error("Source configuration must include a valid 'type' property.");
        }
        if (!destinationType) {
            this.logger.error("Destination configuration must include a valid 'type' property.");
            throw new Error("Destination configuration must include a valid 'type' property.");
        }

        // Transform the source config input into the appropriate union member
        const transformedSourceConfig = this.transformSourceConfigInput(input.sourceConfig);

        // Transform the destination config input into the appropriate union member
        const transformedDestinationConfig = this.transformDestinationConfigInput(
            input.destinationConfig
        );

        const config: BackupJobConfig = {
            id,
            name: input.name,
            sourceType,
            destinationType,
            schedule: input.schedule || '0 2 * * *',
            enabled: input.enabled,
            sourceConfig: transformedSourceConfig,
            destinationConfig: transformedDestinationConfig,
            createdAt: now,
            updatedAt: now,
        };

        this.configs.set(id, config);
        await this.saveConfigs();

        if (config.enabled) {
            this.scheduleJob(config);
        }

        return config;
    }

    async updateBackupJobConfig(
        id: string,
        input: UpdateBackupJobConfigInput
    ): Promise<BackupJobConfig | null> {
        this.logger.debug(
            `[updateBackupJobConfig] Called with ID: ${id}, Input: ${JSON.stringify(input)}`
        );
        const existing = this.configs.get(id);
        if (!existing) {
            this.logger.warn(`[updateBackupJobConfig] No existing config found for ID: ${id}`);
            return null;
        }
        this.logger.debug(
            `[updateBackupJobConfig] Existing config for ID ${id}: ${JSON.stringify(existing)}`
        );

        // Handle sourceConfig update
        let updatedSourceConfig = existing.sourceConfig;
        let updatedSourceType = existing.sourceType;
        if (input.sourceConfig) {
            const inputSourceType = input.sourceConfig.type;
            if (!inputSourceType) {
                this.logger.warn(
                    `[updateBackupJobConfig] Source config update for ID ${id} is missing 'type'. Update skipped for sourceConfig.`
                );
            } else {
                // Transform the input into the appropriate union member
                updatedSourceConfig = this.transformSourceConfigInput(input.sourceConfig);
                updatedSourceType = inputSourceType;
                this.logger.debug(`[updateBackupJobConfig] Transformed sourceConfig for ${id}.`);
            }
        }

        // Handle destinationConfig update
        let updatedDestinationConfig = existing.destinationConfig;
        let updatedDestinationType = existing.destinationType;
        if (input.destinationConfig) {
            const inputDestinationType = input.destinationConfig.type;
            if (!inputDestinationType) {
                this.logger.warn(
                    `[updateBackupJobConfig] Destination config update for ID ${id} is missing 'type'. Update skipped for destinationConfig.`
                );
            } else {
                // Transform the input into the appropriate union member
                updatedDestinationConfig = this.transformDestinationConfigInput(input.destinationConfig);
                updatedDestinationType = inputDestinationType;
                this.logger.debug(`[updateBackupJobConfig] Updated destinationConfig for ${id}.`);
            }
        }

        const updated: BackupJobConfig = {
            ...existing,
            name: input.name ?? existing.name,
            schedule: input.schedule ?? existing.schedule,
            enabled: input.enabled ?? existing.enabled,
            sourceType: updatedSourceType,
            destinationType: updatedDestinationType,
            sourceConfig: updatedSourceConfig,
            destinationConfig: updatedDestinationConfig,
            updatedAt: new Date().toISOString(),
            lastRunAt: input.lastRunAt !== undefined ? input.lastRunAt : existing.lastRunAt,
            lastRunStatus:
                input.lastRunStatus !== undefined ? input.lastRunStatus : existing.lastRunStatus,
        };

        this.logger.debug(
            `[updateBackupJobConfig] Updated object for ID ${id} (before set): ${JSON.stringify(updated)}`
        );

        this.configs.set(id, updated);
        await this.saveConfigs();
        this.logger.debug(`[updateBackupJobConfig] Configs saved for ID: ${id}`);

        this.unscheduleJob(id);
        if (updated.enabled) {
            this.scheduleJob(updated);
        }

        return updated;
    }

    async deleteBackupJobConfig(id: string): Promise<boolean> {
        const config = this.configs.get(id);
        if (!config) return false;

        this.unscheduleJob(id);
        this.configs.delete(id);
        await this.saveConfigs();
        return true;
    }

    async getBackupJobConfig(id: string): Promise<BackupJobConfig | null> {
        this.logger.debug(`[getBackupJobConfig] Called for ID: ${id}`);
        const config = this.configs.get(id);
        if (config) {
            this.logger.debug(
                `[getBackupJobConfig] Found config for ID ${id}: ${JSON.stringify(config)}`
            );
        } else {
            this.logger.warn(`[getBackupJobConfig] No config found for ID: ${id}`);
        }
        return config || null;
    }

    async getAllBackupJobConfigs(): Promise<BackupJobConfig[]> {
        return Array.from(this.configs.values());
    }

    private transformPlainObjectToSourceConfig(
        obj: any,
        sourceType: SourceType
    ): ZfsPreprocessConfig | FlashPreprocessConfig | ScriptPreprocessConfig | RawBackupConfig {
        switch (sourceType) {
            case SourceType.ZFS: {
                const zfsConfig = new ZfsPreprocessConfig();
                Object.assign(zfsConfig, obj);
                return zfsConfig;
            }
            case SourceType.FLASH: {
                const flashConfig = new FlashPreprocessConfig();
                Object.assign(flashConfig, obj);
                return flashConfig;
            }
            case SourceType.SCRIPT: {
                const scriptConfig = new ScriptPreprocessConfig();
                Object.assign(scriptConfig, obj);
                return scriptConfig;
            }
            case SourceType.RAW: {
                const rawConfig = new RawBackupConfig();
                Object.assign(rawConfig, obj);
                return rawConfig;
            }
            default:
                this.logger.error(
                    `Unsupported source type encountered during plain object transformation: ${sourceType as string}`
                );
                throw new Error(`Unsupported source type: ${sourceType as string}`);
        }
    }

    private transformPlainObjectToDestinationConfig(
        obj: any,
        destinationType: DestinationType
    ): RcloneDestinationConfig {
        switch (destinationType) {
            case DestinationType.RCLONE: {
                const rcloneConfig = new RcloneDestinationConfig();
                Object.assign(rcloneConfig, obj);
                return rcloneConfig;
            }

            default:
                throw new Error(`Unsupported destination type: ${destinationType}`);
        }
    }

    private async executeBackupJob(config: BackupJobConfig): Promise<void> {
        this.logger.log(
            `Executing backup job via BackupOrchestrationService: ${config.name} (ID: ${config.id})`
        );

        // Prepare updates, currentJobId will be set after job starts
        const updatesForInMemoryConfig: Partial<BackupJobConfig> = {
            lastRunAt: new Date().toISOString(),
            lastRunStatus: 'Starting...',
            currentJobId: undefined, // Initialize
        };

        try {
            // Delegate to the BackupOrchestrationService and get the jobId
            // IMPORTANT: This assumes backupOrchestrationService.executeBackupJob is modified to return the jobId string
            const jobId = await this.backupOrchestrationService.executeBackupJob(config, config.id);

            if (jobId) {
                updatesForInMemoryConfig.currentJobId = jobId;
                this.logger.log(
                    `Backup job ${config.name} (ID: ${config.id}) initiated by BackupOrchestrationService with Job ID: ${jobId}.`
                );
            } else {
                this.logger.warn(
                    `BackupOrchestrationService.executeBackupJob did not return a jobId for config ${config.id}. currentJobId will not be set.`
                );
            }

            // Update the in-memory config with all changes including currentJobId
            const currentConfig = this.configs.get(config.id);
            if (currentConfig) {
                this.configs.set(config.id, {
                    ...currentConfig,
                    ...updatesForInMemoryConfig,
                });
            } else {
                this.logger.warn(
                    `Config ${config.id} not found in memory map after starting job. State may be inconsistent.`
                );
                // Fallback: attempt to set it anyway, though this indicates a potential issue
                this.configs.set(config.id, {
                    ...config, // Use the passed config as a base
                    ...updatesForInMemoryConfig,
                });
            }

            // Persist the updated lastRunAt/lastRunStatus. saveConfigs() strips
            // the transient currentJobId before writing, so the running job ID
            // lives only in memory (this.configs) and never reaches
            // backup-jobs.json, while GraphQL resolvers can still read it.
            await this.saveConfigs();
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(
                `Backup job ${config.name} (ID: ${config.id}) failed during orchestration: ${errorMessage}`,
                (error as Error).stack
            );

            const currentConfig = this.configs.get(config.id);
            const failedConfigUpdate = {
                lastRunAt: new Date().toISOString(),
                lastRunStatus: `Failed: ${errorMessage}`,
                currentJobId: undefined, // Clear currentJobId on failure
            };

            if (currentConfig) {
                this.configs.set(config.id, {
                    ...currentConfig,
                    ...failedConfigUpdate,
                });
            } else {
                // If not in map, use passed config as base
                this.configs.set(config.id, {
                    ...config,
                    ...failedConfigUpdate,
                });
            }
            await this.saveConfigs(); // Save updated status, currentJobId will be cleared
            throw error;
        }
    }

    // Add a new method to be called when a job completes or is stopped
    public async handleJobCompletion(
        configId: string,
        finalStatus: string,
        jobId?: string
    ): Promise<void> {
        const config = this.configs.get(configId);
        if (config) {
            this.logger.log(
                `Handling job completion for config ${configId}, job ${jobId}. Final status: ${finalStatus}`
            );

            const updates: Partial<BackupJobConfig> = {
                lastRunStatus: finalStatus,
                lastRunAt: new Date().toISOString(), // Update lastRunAt to completion time
            };

            // Only clear currentJobId if it matches the completed/stopped job
            if (config.currentJobId === jobId) {
                updates.currentJobId = undefined;
            } else if (jobId && config.currentJobId) {
                this.logger.warn(
                    `Completed job ID ${jobId} does not match currentJobId ${config.currentJobId} for config ${configId}. currentJobId not cleared.`
                );
            }

            this.configs.set(configId, {
                ...config,
                ...updates,
            });

            // currentJobId will be cleared or remain as is in memory.
            // saveConfigs will persist this state.
            await this.saveConfigs();
        } else {
            this.logger.warn(`Config ${configId} not found when trying to handle job completion.`);
        }
    }

    private scheduleJob(config: BackupJobConfig): void {
        try {
            const job = new CronJob(
                config.schedule,
                () => this.executeBackupJob(config),
                null,
                false,
                'UTC'
            );

            this.schedulerRegistry.addCronJob(getBackupJobGroupId(config.id), job);
            job.start();
            this.logger.log(`Scheduled backup job: ${config.name} with schedule: ${config.schedule}`);
        } catch (error) {
            this.logger.error(`Failed to schedule backup job ${config.name}:`, error);
        }
    }

    private unscheduleJob(id: string): void {
        try {
            const jobName = getBackupJobGroupId(id);
            if (this.schedulerRegistry.doesExist('cron', jobName)) {
                this.schedulerRegistry.deleteCronJob(jobName);
                this.logger.log(`Unscheduled backup job: ${id}`);
            } else {
                this.logger.debug(`No existing cron job found to unschedule for backup job: ${id}`);
            }
        } catch (error) {
            this.logger.error(`Failed to unschedule backup job ${id}:`, error);
        }
    }

    private async loadConfigs(): Promise<void> {
        try {
            if (existsSync(this.configPath)) {
                const data = await readFile(this.configPath, 'utf-8');
                const configs: BackupJobConfig[] = JSON.parse(data);

                // First, unschedule any existing jobs before clearing the config map
                this.configs.forEach((config) => {
                    if (config.enabled) {
                        this.unscheduleJob(config.id);
                    }
                });

                this.configs.clear();
                configs.forEach((config) => {
                    // Transform plain objects back into class instances
                    const transformedConfig = {
                        ...config,
                        sourceConfig: this.transformPlainObjectToSourceConfig(
                            config.sourceConfig,
                            config.sourceType
                        ),
                        destinationConfig: this.transformPlainObjectToDestinationConfig(
                            config.destinationConfig,
                            config.destinationType
                        ),
                    };

                    this.configs.set(config.id, transformedConfig);
                    if (transformedConfig.enabled) {
                        this.scheduleJob(transformedConfig);
                    }
                });

                this.logger.log(`Loaded ${configs.length} backup job configurations`);
            }
        } catch (error) {
            this.logger.error('Failed to load backup configurations:', error);
        }
    }

    private async saveConfigs(): Promise<void> {
        try {
            // Create a deep copy of configs for saving, stripping currentJobId
            const configsToSave: BackupJobConfig[] = [];
            for (const config of this.configs.values()) {
                const { currentJobId, ...restOfConfig } = config; // Destructure to remove currentJobId
                configsToSave.push(restOfConfig as BackupJobConfig); // Cast needed if TS complains
            }
            await writeFile(this.configPath, JSON.stringify(configsToSave, null, 2));
        } catch (error) {
            this.logger.error('Failed to save backup configurations:', error);
        }
    }
}
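The service above depends on a contract it only assumes (see the IMPORTANT comment in executeBackupJob): that BackupOrchestrationService.executeBackupJob resolves with the started job's ID. A hypothetical sketch of that expected shape, since the orchestration service itself is not part of this diff:

import { BackupJobConfig } from '@app/unraid-api/graph/resolvers/backup/backup.model.js';

// Hypothetical interface inferred from the call sites in BackupConfigService;
// the real class lives in backup-orchestration.service.ts (not shown here).
interface BackupOrchestrationContract {
    // Starts the backup described by `config` and resolves with the rclone
    // job ID, or undefined if no job could be identified.
    executeBackupJob(config: BackupJobConfig, configId: string): Promise<string | undefined>;
}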
@@ -0,0 +1,313 @@
import { Logger } from '@nestjs/common';
import { Args, ResolveField, Resolver } from '@nestjs/graphql';

import { Resource } from '@unraid/shared/graphql.model';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar';
import {
    AuthActionVerb,
    AuthPossession,
    UsePermissions,
} from '@unraid/shared/use-permissions.directive.js';

import { BackupConfigService } from '@app/unraid-api/graph/resolvers/backup/backup-config.service.js';
import {
    BackupJobConfig,
    BackupStatus,
    CreateBackupJobConfigInput,
    InitiateBackupInput,
    UpdateBackupJobConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import { BackupOrchestrationService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-orchestration.service.js';
import { BackupMutations } from '@app/unraid-api/graph/resolvers/mutation/mutation.model.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';

@Resolver(() => BackupMutations)
export class BackupMutationsResolver {
    private readonly logger = new Logger(BackupMutationsResolver.name);

    constructor(
        private readonly backupConfigService: BackupConfigService,
        private readonly rcloneService: RCloneService,
        private readonly backupOrchestrationService: BackupOrchestrationService
    ) {}

    private async executeBackup(
        sourcePath: string,
        remoteName: string,
        destinationPath: string,
        options: Record<string, any> = {},
        configId?: string
    ): Promise<BackupStatus> {
        try {
            this.logger.log(`Executing backup: ${sourcePath} -> ${remoteName}:${destinationPath}`);

            // Create a temporary config for the orchestration service
            const tempConfig: BackupJobConfig = {
                id: configId || `temp-${Date.now()}`,
                name: `Manual backup to ${remoteName}`,
                sourceType: 'raw' as any,
                destinationType: 'rclone' as any,
                schedule: '',
                enabled: true,
                sourceConfig: {
                    type: 'raw',
                    sourcePath: sourcePath,
                } as any,
                destinationConfig: {
                    type: 'rclone',
                    remoteName: remoteName,
                    destinationPath: destinationPath,
                    options: options,
                } as any,
                createdAt: new Date().toISOString(),
                updatedAt: new Date().toISOString(),
            };

            const jobId = tempConfig.id;

            // Use the orchestration service for execution
            await this.backupOrchestrationService.executeBackupJob(tempConfig, jobId);

            this.logger.log(`Backup job initiated successfully with ID: ${jobId}`);

            return {
                status: 'Backup initiated successfully',
                jobId: jobId,
            };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(
                `Failed to execute backup: ${errorMessage}`,
                error instanceof Error ? error.stack : undefined
            );

            return {
                status: `Failed to initiate backup: ${errorMessage}`,
                jobId: undefined,
            };
        }
    }

    @ResolveField(() => BackupJobConfig, {
        description: 'Create a new backup job configuration',
    })
    @UsePermissions({
        action: AuthActionVerb.CREATE,
        resource: Resource.BACKUP,
        possession: AuthPossession.ANY,
    })
    async createBackupJobConfig(
        @Args('input') input: CreateBackupJobConfigInput
    ): Promise<BackupJobConfig> {
        return this.backupConfigService.createBackupJobConfig(input);
    }

    @ResolveField(() => BackupJobConfig, {
        description: 'Update a backup job configuration',
        nullable: true,
    })
    @UsePermissions({
        action: AuthActionVerb.UPDATE,
        resource: Resource.BACKUP,
        possession: AuthPossession.ANY,
    })
    async updateBackupJobConfig(
        @Args('id', { type: () => PrefixedID }) id: string,
        @Args('input') input: UpdateBackupJobConfigInput
    ): Promise<BackupJobConfig | null> {
        return this.backupConfigService.updateBackupJobConfig(id, input);
    }

    @ResolveField(() => Boolean, {
        description: 'Delete a backup job configuration',
    })
    @UsePermissions({
        action: AuthActionVerb.DELETE,
        resource: Resource.BACKUP,
        possession: AuthPossession.ANY,
    })
    async deleteBackupJobConfig(@Args('id', { type: () => PrefixedID }) id: string): Promise<boolean> {
        return this.backupConfigService.deleteBackupJobConfig(id);
    }

    @ResolveField(() => BackupStatus, {
        description: 'Initiates a backup using a configured remote.',
    })
    @UsePermissions({
        action: AuthActionVerb.CREATE,
        resource: Resource.BACKUP,
        possession: AuthPossession.ANY,
    })
    async initiateBackup(@Args('input') input: InitiateBackupInput): Promise<BackupStatus> {
        return this.executeBackup(
            input.sourcePath,
            input.remoteName,
            input.destinationPath,
            input.options || {}
        );
    }

    @ResolveField(() => BackupJobConfig, {
        description: 'Toggle a backup job configuration enabled/disabled',
        nullable: true,
    })
    @UsePermissions({
        action: AuthActionVerb.UPDATE,
        resource: Resource.BACKUP,
        possession: AuthPossession.ANY,
    })
    async toggleJobConfig(
        @Args('id', { type: () => PrefixedID }) id: string
    ): Promise<BackupJobConfig | null> {
        const existing = await this.backupConfigService.getBackupJobConfig(id);
        if (!existing) return null;

        return this.backupConfigService.updateBackupJobConfig(id, {
            enabled: !existing.enabled,
        });
    }

    @ResolveField(() => BackupStatus, {
        description: 'Manually trigger a backup job using existing configuration',
    })
    @UsePermissions({
        action: AuthActionVerb.CREATE,
        resource: Resource.BACKUP,
        possession: AuthPossession.ANY,
    })
    async triggerJob(@Args('id', { type: () => PrefixedID }) id: string): Promise<BackupStatus> {
        const config = await this.backupConfigService.getBackupJobConfig(id);
        if (!config) {
            return {
                status: 'Failed to trigger backup: Configuration not found',
                jobId: undefined,
            };
        }

        try {
            // Use the orchestration service to execute the backup job
            await this.backupOrchestrationService.executeBackupJob(config, config.id);

            // Update the config with job start information
            await this.backupConfigService.updateBackupJobConfig(id, {
                lastRunStatus: `Started with job ID: ${config.id}`,
                lastRunAt: new Date().toISOString(),
            });

            return {
                status: 'Backup job triggered successfully',
                jobId: config.id,
            };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(`Failed to trigger backup job ${id}: ${errorMessage}`);

            await this.backupConfigService.updateBackupJobConfig(id, {
                lastRunStatus: `Failed: ${errorMessage}`,
                lastRunAt: new Date().toISOString(),
            });

            return {
                status: `Failed to trigger backup: ${errorMessage}`,
                jobId: undefined,
            };
        }
    }

    @ResolveField(() => BackupStatus, {
        description: 'Stop all running backup jobs',
    })
    @UsePermissions({
        action: AuthActionVerb.DELETE,
        resource: Resource.BACKUP,
        possession: AuthPossession.ANY,
    })
    async stopAllBackupJobs(): Promise<BackupStatus> {
        try {
            const result = await this.rcloneService['rcloneApiService'].stopAllJobs();
            const stoppedCount = result.stopped.length;
            const errorCount = result.errors.length;

            if (stoppedCount > 0) {
                this.logger.log(`Stopped ${stoppedCount} backup jobs`);
            }

            if (errorCount > 0) {
                this.logger.warn(`Failed operations on ${errorCount} jobs: ${result.errors.join(', ')}`);
            }

            return {
                status: `Stopped ${stoppedCount} jobs${errorCount > 0 ? `, ${errorCount} errors` : ''}`,
                jobId: undefined,
            };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(`Failed to stop backup jobs: ${errorMessage}`);
            return {
                status: `Failed to stop backup jobs: ${errorMessage}`,
                jobId: undefined,
            };
        }
    }

    @ResolveField(() => BackupStatus, {
        description: 'Stop a specific backup job',
    })
    @UsePermissions({
        action: AuthActionVerb.DELETE,
        resource: Resource.BACKUP,
        possession: AuthPossession.ANY,
    })
    async stopBackupJob(@Args('id', { type: () => PrefixedID }) id: string): Promise<BackupStatus> {
        try {
            const result = await this.rcloneService['rcloneApiService'].stopJob(id);
            const stoppedCount = result.stopped.length;
            const errorCount = result.errors.length;

            if (stoppedCount > 0) {
                this.logger.log(`Stopped backup job: ${id}`);
            }

            if (errorCount > 0) {
                this.logger.warn(`Failed to stop job ${id}: ${result.errors.join(', ')}`);
            }

            return {
                status: stoppedCount > 0 ? `Stopped job ${id}` : `Failed to stop job ${id}`,
                jobId: stoppedCount > 0 ? id : undefined,
            };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(`Failed to stop backup job ${id}: ${errorMessage}`);
            return {
                status: `Failed to stop backup job: ${errorMessage}`,
                jobId: undefined,
            };
        }
    }

    @ResolveField(() => BackupStatus, {
        description: 'Forget all finished backup jobs to clean up the job list',
    })
    @UsePermissions({
        action: AuthActionVerb.DELETE,
        resource: Resource.BACKUP,
        possession: AuthPossession.ANY,
    })
    async forgetFinishedBackupJobs(): Promise<BackupStatus> {
        try {
            this.logger.log('Forgetting finished backup jobs is handled automatically by RClone');
            return {
                status: 'Finished jobs are automatically cleaned up by RClone',
                jobId: undefined,
            };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(`Failed to forget finished backup jobs: ${errorMessage}`);
            return {
                status: `Failed to forget finished backup jobs: ${errorMessage}`,
                jobId: undefined,
            };
        }
    }
}
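For reference, a hypothetical client-side invocation of the initiateBackup field defined above. The `backup` wrapper field on Mutation is an assumption based on the BackupMutations resolver type; the exact nesting depends on mutation.model.ts, which is not shown in this diff. The variable values are example data only:

// Hypothetical GraphQL document for the initiateBackup resolver above.
const INITIATE_BACKUP = /* GraphQL */ `
    mutation InitiateBackup($input: InitiateBackupInput!) {
        backup {
            initiateBackup(input: $input) {
                status
                jobId
            }
        }
    }
`;

// Variables mirror InitiateBackupInput from backup.model.ts below.
const variables = {
    input: {
        remoteName: 'google_drives',          // example remote name
        sourcePath: '/mnt/user/documents',    // example source path
        destinationPath: 'backups/documents', // example remote path
        options: { transfers: 4 },
    },
};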
226
api/src/unraid-api/graph/resolvers/backup/backup.model.ts
Normal file
@@ -0,0 +1,226 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';

import { type Layout } from '@jsonforms/core';
import { Node } from '@unraid/shared/graphql.model.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import {
    IsBoolean,
    IsNotEmpty,
    IsObject,
    IsOptional,
    IsString,
    Matches,
    ValidateIf,
    ValidateNested,
} from 'class-validator';
import { DateTimeISOResolver, GraphQLJSON } from 'graphql-scalars';

import {
    DestinationConfigInput,
    DestinationConfigInputUnion,
    DestinationConfigUnion,
    DestinationType,
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { JobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import {
    SourceConfigInput,
    SourceConfigInputUnion,
    SourceConfigUnion,
    SourceType,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { DataSlice } from '@app/unraid-api/types/json-forms.js';

@ObjectType({
    implements: () => Node,
})
export class Backup extends Node {
    @Field(() => [JobStatus])
    jobs!: JobStatus[];

    @Field(() => [BackupJobConfig])
    configs!: BackupJobConfig[];
}

@InputType()
export class InitiateBackupInput {
    @Field(() => String, { description: 'The name of the remote configuration to use for the backup.' })
    @IsString()
    @IsNotEmpty()
    remoteName!: string;

    @Field(() => String, { description: 'Source path to backup.' })
    @IsString()
    @IsNotEmpty()
    sourcePath!: string;

    @Field(() => String, { description: 'Destination path on the remote.' })
    @IsString()
    @IsNotEmpty()
    destinationPath!: string;

    @Field(() => GraphQLJSON, {
        description: 'Additional options for the backup operation, such as --dry-run or --transfers.',
        nullable: true,
    })
    @IsOptional()
    @IsObject()
    options?: Record<string, unknown>;
}

@ObjectType()
export class BackupStatus {
    @Field(() => String, {
        description: 'Status message indicating the outcome of the backup initiation.',
    })
    status!: string;

    @Field(() => String, {
        description: 'Job ID if available, can be used to check job status.',
        nullable: true,
    })
    jobId?: string;
}

@ObjectType()
export class RCloneWebGuiInfo {
    @Field()
    url!: string;
}

@ObjectType({
    implements: () => Node,
})
export class BackupJobConfig extends Node {
    @Field(() => String, { description: 'Human-readable name for this backup job' })
    name!: string;

    @Field(() => SourceType, { description: 'Type of the backup source' })
    sourceType!: SourceType;

    @Field(() => DestinationType, { description: 'Type of the backup destination' })
    destinationType!: DestinationType;

    @Field(() => String, {
        description: 'Cron schedule expression (e.g., "0 2 * * *" for daily at 2AM)',
    })
    schedule!: string;

    @Field(() => Boolean, { description: 'Whether this backup job is enabled' })
    enabled!: boolean;

    @Field(() => SourceConfigUnion, { description: 'Source configuration for this backup job' })
    sourceConfig!: typeof SourceConfigUnion;

    @Field(() => DestinationConfigUnion, {
        description: 'Destination configuration for this backup job',
    })
    destinationConfig!: typeof DestinationConfigUnion;

    @Field(() => DateTimeISOResolver, { description: 'When this config was created' })
    createdAt!: string;

    @Field(() => DateTimeISOResolver, { description: 'When this config was last updated' })
    updatedAt!: string;

    @Field(() => DateTimeISOResolver, { description: 'Last time this job ran', nullable: true })
    lastRunAt?: string;

    @Field(() => String, { description: 'Status of last run', nullable: true })
    lastRunStatus?: string;

    @Field(() => String, { description: 'Current running job ID for this config', nullable: true })
    currentJobId?: string;
}

@InputType()
export class BaseBackupJobConfigInput {
    @Field(() => String, { nullable: true })
    @IsOptional()
    @IsString()
    @IsNotEmpty()
    name?: string;

    @Field(() => String, { nullable: true })
    @IsOptional()
    @IsString()
    @ValidateIf((o) => o.schedule && o.schedule.length > 0)
    @Matches(
        /^(\*|[0-5]?\d)(\s+(\*|[0-1]?\d|2[0-3]))(\s+(\*|[1-2]?\d|3[0-1]))(\s+(\*|[1-9]|1[0-2]))(\s+(\*|[0-6]))$/,
        {
            message: 'schedule must be a valid cron expression',
        }
    )
    schedule?: string;

    @Field(() => Boolean, { nullable: true })
    @IsOptional()
    @IsBoolean()
    enabled?: boolean;

    @Field(() => SourceConfigInputUnion, {
        description: 'Source configuration for this backup job',
        nullable: true,
    })
    @IsOptional()
    @ValidateNested()
    sourceConfig?: SourceConfigInput;

    @Field(() => DestinationConfigInputUnion, {
        description: 'Destination configuration for this backup job',
        nullable: true,
    })
    @IsOptional()
    @ValidateNested()
    destinationConfig?: DestinationConfigInput;
}

@InputType()
export class CreateBackupJobConfigInput extends BaseBackupJobConfigInput {
    @Field(() => String)
    @IsString()
    @IsNotEmpty()
    declare name: string;

    @Field(() => Boolean, { defaultValue: true })
    @IsBoolean()
    @ValidateIf((o) => o.schedule && o.schedule.length > 0)
    declare enabled: boolean;
}

@InputType()
export class UpdateBackupJobConfigInput extends BaseBackupJobConfigInput {
    @Field(() => String, { nullable: true })
    @IsOptional()
    @IsString()
    lastRunStatus?: string;

    @Field(() => String, { nullable: true })
    @IsOptional()
    @IsString()
    lastRunAt?: string;

    @Field(() => String, { nullable: true })
    @IsOptional()
    @IsString()
    currentJobId?: string;
}

@ObjectType()
export class BackupJobConfigForm {
    @Field(() => PrefixedID)
    id!: string;

    @Field(() => GraphQLJSON)
    dataSchema!: { properties: DataSlice; type: 'object' };

    @Field(() => GraphQLJSON)
    uiSchema!: Layout;
}

@InputType()
export class BackupJobConfigFormInput {
    @Field(() => Boolean, { defaultValue: false })
    @IsOptional()
    @IsBoolean()
    showAdvanced?: boolean;
}
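A quick check of the cron pattern applied by @Matches on `schedule` above. It accepts standard five-field expressions (minute hour day month weekday) with single values or `*` per field; note that range syntax such as `1-5` in the weekday field is rejected by this pattern, even though the form schema later in this diff suggests "0 2 * * 1-5" as a preset. A small standalone sketch:

// Sketch: exercising the schedule regex from BaseBackupJobConfigInput above.
const CRON_RE =
    /^(\*|[0-5]?\d)(\s+(\*|[0-1]?\d|2[0-3]))(\s+(\*|[1-2]?\d|3[0-1]))(\s+(\*|[1-9]|1[0-2]))(\s+(\*|[0-6]))$/;

console.log(CRON_RE.test('0 2 * * *'));   // true  - daily at 2 AM
console.log(CRON_RE.test('0 2 * * 0'));   // true  - weekly on Sunday
console.log(CRON_RE.test('0 2 * * 1-5')); // false - weekday ranges are not matched
console.log(CRON_RE.test('not a cron'));  // false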
30
api/src/unraid-api/graph/resolvers/backup/backup.module.ts
Normal file
@@ -0,0 +1,30 @@
import { forwardRef, Module } from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';

import { BackupConfigService } from '@app/unraid-api/graph/resolvers/backup/backup-config.service.js';
import { BackupMutationsResolver } from '@app/unraid-api/graph/resolvers/backup/backup-mutations.resolver.js';
import {
    BackupJobConfigResolver,
    BackupResolver,
} from '@app/unraid-api/graph/resolvers/backup/backup.resolver.js';
import { BackupDestinationModule } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.module.js';
import { BackupJobStatusResolver } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.resolver.js';
import { BackupJobTrackingService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-tracking.service.js';
import { BackupOrchestrationService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-orchestration.service.js';
import { BackupSourceModule } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.module.js';
import { RCloneModule } from '@app/unraid-api/graph/resolvers/rclone/rclone.module.js';

@Module({
    imports: [RCloneModule, ScheduleModule.forRoot(), BackupSourceModule, BackupDestinationModule],
    providers: [
        BackupResolver,
        BackupJobConfigResolver,
        BackupMutationsResolver,
        BackupConfigService,
        BackupOrchestrationService,
        BackupJobTrackingService,
        BackupJobStatusResolver,
    ],
    exports: [forwardRef(() => BackupOrchestrationService), BackupJobTrackingService],
})
export class BackupModule {}
131
api/src/unraid-api/graph/resolvers/backup/backup.resolver.ts
Normal file
@@ -0,0 +1,131 @@
import { Logger } from '@nestjs/common';
import { Args, Parent, Query, ResolveField, Resolver } from '@nestjs/graphql';

import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';

import { BackupConfigService } from '@app/unraid-api/graph/resolvers/backup/backup-config.service.js';
import {
    Backup,
    BackupJobConfig,
    BackupJobConfigForm,
    BackupJobConfigFormInput,
    BackupStatus,
} from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import { buildBackupJobConfigSchema } from '@app/unraid-api/graph/resolvers/backup/jsonforms/backup-jsonforms-config.js';
import { JobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { BackupJobTrackingService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-tracking.service.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';

@Resolver(() => Backup)
export class BackupResolver {
    private readonly logger = new Logger(BackupResolver.name);

    constructor(
        private readonly rcloneService: RCloneService,
        private readonly backupConfigService: BackupConfigService,
        private readonly formatService: FormatService,
        private readonly backupJobTrackingService: BackupJobTrackingService
    ) {}

    @Query(() => Backup, {
        description: 'Get backup service information',
    })
    async backup(): Promise<Backup> {
        return {
            id: 'backup',
            jobs: [],
            configs: [],
        };
    }

    @ResolveField(() => [JobStatus], {
        description: 'Get all running backup jobs',
    })
    async jobs(): Promise<JobStatus[]> {
        return this.backupJobTrackingService.getAllJobStatuses();
    }

    @ResolveField(() => [BackupJobConfig], {
        description: 'Get all backup job configurations',
    })
    async configs(): Promise<BackupJobConfig[]> {
        return this.backupConfigService.getAllBackupJobConfigs();
    }

    @Query(() => BackupJobConfig, {
        description: 'Get a specific backup job configuration',
        nullable: true,
    })
    async backupJobConfig(
        @Args('id', { type: () => PrefixedID }) id: string
    ): Promise<BackupJobConfig | null> {
        return this.backupConfigService.getBackupJobConfig(id);
    }

    @Query(() => JobStatus, {
        description: 'Get status of a specific backup job',
        nullable: true,
    })
    async backupJob(@Args('id', { type: () => PrefixedID }) id: string): Promise<JobStatus | null> {
        return this.backupJobTrackingService.getJobStatus(id) || null;
    }

    @ResolveField(() => BackupStatus, {
        description: 'Get the status for the backup service',
    })
    async status(): Promise<BackupStatus> {
        return {
            status: 'Available',
            jobId: undefined,
        };
    }

    @Query(() => BackupJobConfigForm, {
        description: 'Get the JSON schema for backup job configuration form',
    })
    async backupJobConfigForm(
        @Args('input', { nullable: true }) input?: BackupJobConfigFormInput
    ): Promise<BackupJobConfigForm> {
        const remotes = await this.rcloneService.getRemoteDetails();

        const { dataSchema, uiSchema } = buildBackupJobConfigSchema({
            remotes,
        });

        return {
            id: 'backup-job-config-form',
            dataSchema,
            uiSchema,
        };
    }
}

@Resolver(() => BackupJobConfig)
export class BackupJobConfigResolver {
    private readonly logger = new Logger(BackupJobConfigResolver.name);

    constructor(private readonly backupJobTrackingService: BackupJobTrackingService) {}

    @ResolveField(() => JobStatus, {
        description: 'Get the current running job for this backup config',
        nullable: true,
    })
    async currentJob(@Parent() config: BackupJobConfig): Promise<JobStatus | null> {
        if (!config.currentJobId) {
            return null;
        }

        this.logger.debug(
            `Looking for current job for config ${config.id} using currentJobId: ${config.currentJobId}`
        );

        const jobStatus = this.backupJobTrackingService.getJobStatus(config.currentJobId);
        if (!jobStatus) {
            this.logger.debug(`No job status found for job ID: ${config.currentJobId}`);
            return null;
        }

        return jobStatus as JobStatus;
    }
}
32
api/src/unraid-api/graph/resolvers/backup/backup.utils.ts
Normal file
@@ -0,0 +1,32 @@
export const BACKUP_JOB_GROUP_PREFIX = 'backup-';

/**
 * Generates the group ID for a backup job based on its configuration ID.
 * This group ID is used by RClone to group related backup operations.
 * @param configId The ID of the backup job configuration.
 * @returns The RClone group ID string.
 */
export function getBackupJobGroupId(configId: string): string {
    return `${BACKUP_JOB_GROUP_PREFIX}${configId}`;
}

/**
 * Extracts the configuration ID from a backup job group ID.
 * @param groupId The RClone group ID string (e.g., "backup-someConfigId").
 * @returns The configuration ID if the group ID is valid and prefixed, otherwise undefined.
 */
export function getConfigIdFromGroupId(groupId: string): string | undefined {
    if (groupId.startsWith(BACKUP_JOB_GROUP_PREFIX)) {
        return groupId.substring(BACKUP_JOB_GROUP_PREFIX.length);
    }
    return undefined;
}

/**
 * Checks if the given ID corresponds to a backup job group.
 * @param id The ID string to check (can be a job ID or a group ID).
 * @returns True if the ID represents a backup job group, false otherwise.
 */
export function isBackupJobGroup(id: string): boolean {
    return id.startsWith(BACKUP_JOB_GROUP_PREFIX);
}
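A short usage sketch of the helpers above, round-tripping a config ID through its group ID (the config ID value is an example):

import {
    getBackupJobGroupId,
    getConfigIdFromGroupId,
    isBackupJobGroup,
} from '@app/unraid-api/graph/resolvers/backup/backup.utils.js';

const groupId = getBackupJobGroupId('my-config-id'); // 'backup-my-config-id'
isBackupJobGroup(groupId);                           // true
getConfigIdFromGroupId(groupId);                     // 'my-config-id'
getConfigIdFromGroupId('unrelated-id');              // undefined ('backup-' prefix missing)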
@@ -0,0 +1,180 @@
import type { LabelElement, SchemaBasedCondition } from '@jsonforms/core';
import { JsonSchema7, RuleEffect } from '@jsonforms/core';

import type { RCloneRemote } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import type { SettingSlice, UIElement } from '@app/unraid-api/types/json-forms.js';
import { DestinationType } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { createLabeledControl } from '@app/unraid-api/graph/utils/form-utils.js';

export function getDestinationConfigSlice({ remotes = [] }: { remotes?: RCloneRemote[] }): SettingSlice {
    const destinationConfigElements: UIElement[] = [
        {
            type: 'Control',
            scope: '#/properties/destinationConfig/properties/type',
            options: {
                format: 'radio',
                radioLayout: 'horizontal',
                options: [
                    {
                        label: 'RClone Remote',
                        value: DestinationType.RCLONE,
                        description: 'Backup to cloud storage via RClone',
                    },
                ],
            },
        },

        // RClone Configuration
        {
            type: 'VerticalLayout',
            rule: {
                effect: RuleEffect.SHOW,
                condition: {
                    scope: '#/properties/destinationConfig/properties/type',
                    schema: { const: DestinationType.RCLONE },
                } as SchemaBasedCondition,
            },
            elements: [
                {
                    type: 'Label',
                    text: 'RClone Configuration',
                    options: {
                        description: 'Configure RClone remote destination settings.',
                    },
                } as LabelElement,

                createLabeledControl({
                    scope: '#/properties/destinationConfig/properties/rcloneConfig/properties/remoteName',
                    label: 'Remote Configuration',
                    description: 'Select the RClone remote configuration to use for this backup',
                    controlOptions: {
                        suggestions: remotes.map((remote) => ({
                            value: remote.name,
                            label: `${remote.name} (${remote.type})`,
                        })),
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/destinationConfig/properties/rcloneConfig/properties/destinationPath',
                    label: 'Destination Path',
                    description:
                        'The path on the remote where files will be stored (e.g., backups/documents)',
                    controlOptions: {
                        placeholder: 'backups/',
                        format: 'string',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/destinationConfig/properties/rcloneConfig/properties/rcloneOptions/properties/transfers',
                    label: 'Number of Transfers',
                    description: 'Number of file transfers to run in parallel (default: 4)',
                    controlOptions: {
                        placeholder: '4',
                        format: 'number',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/destinationConfig/properties/rcloneConfig/properties/rcloneOptions/properties/checkers',
                    label: 'Number of Checkers',
                    description: 'Number of checkers to run in parallel (default: 8)',
                    controlOptions: {
                        placeholder: '8',
                        format: 'number',
                    },
                }),
            ],
        },
    ];

    const destinationConfigProperties: Record<string, JsonSchema7> = {
        destinationConfig: {
            type: 'object',
            title: 'Destination Configuration',
            description: 'Configuration for backup destination',
            properties: {
                type: {
                    type: 'string',
                    title: 'Destination Type',
                    description: 'Type of destination to use for backup',
                    enum: [DestinationType.RCLONE],
                    default: DestinationType.RCLONE,
                },
                rcloneConfig: {
                    type: 'object',
                    title: 'RClone Configuration',
                    properties: {
                        remoteName: {
                            type: 'string',
                            title: 'Remote Name',
                            description: 'Remote name from rclone config',
                            enum:
                                remotes.length > 0
                                    ? remotes.map((remote) => remote.name)
                                    : ['No remotes configured'],
                        },
                        destinationPath: {
                            type: 'string',
                            title: 'Destination Path',
                            description: 'Destination path on the remote',
                            minLength: 1,
                        },
                        rcloneOptions: {
                            type: 'object',
                            title: 'RClone Options',
                            description: 'Advanced RClone configuration options',
                            properties: {
                                transfers: {
                                    type: 'integer',
                                    title: 'Transfers',
                                    description: 'Number of file transfers to run in parallel',
                                    minimum: 1,
                                    maximum: 100,
                                    default: 4,
                                },
                                checkers: {
                                    type: 'integer',
                                    title: 'Checkers',
                                    description: 'Number of checkers to run in parallel',
                                    minimum: 1,
                                    maximum: 100,
                                    default: 8,
                                },
                            },
                        },
                    },
                    required: ['remoteName', 'destinationPath'],
                },
            },
            required: ['type'],
        },
    };

    // Apply conditional logic for destinationConfig
    if (
        destinationConfigProperties.destinationConfig &&
        typeof destinationConfigProperties.destinationConfig === 'object'
    ) {
        destinationConfigProperties.destinationConfig.allOf = [
            {
                if: { properties: { type: { const: DestinationType.RCLONE } }, required: ['type'] },
                then: {
                    required: ['rcloneConfig'],
                },
            },
        ];
    }

    const verticalLayoutElement: UIElement = {
        type: 'VerticalLayout',
        elements: destinationConfigElements,
        options: { step: 2 },
    };

    return {
        properties: destinationConfigProperties,
        elements: [verticalLayoutElement],
    };
}
@@ -0,0 +1,59 @@
import { Writable } from 'stream';

import { DestinationType } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';

export interface BackupDestinationConfig {
    timeout: number;
    cleanupOnFailure: boolean;
    useStreaming?: boolean;
    supportsStreaming?: boolean;
}

export interface BackupDestinationResult {
    success: boolean;
    destinationPath?: string;
    uploadedBytes?: number;
    error?: string;
    cleanupRequired?: boolean;
    metadata?: Record<string, unknown>;
}

export interface StreamingDestinationHandle {
    stream: Writable;
    completionPromise: Promise<BackupDestinationResult>;
}

export interface BackupDestinationProcessorOptions {
    jobId?: string;
    onProgress?: (progress: number) => void;
    onOutput?: (data: string) => void;
    onError?: (error: string) => void;
}

export abstract class BackupDestinationProcessor<TConfig extends BackupDestinationConfig> {
    abstract readonly destinationType: DestinationType;

    abstract execute(
        sourcePath: string,
        config: TConfig,
        options?: BackupDestinationProcessorOptions
    ): Promise<BackupDestinationResult>;

    abstract validate(config: TConfig): Promise<{ valid: boolean; error?: string; warnings?: string[] }>;

    abstract cleanup(result: BackupDestinationResult): Promise<void>;

    // Getter to check if processor supports streaming
    abstract get supportsStreaming(): boolean;

    // Optional getter to get a writable stream for streaming backups
    get getWritableStream():
        | ((
              config: TConfig,
              jobId: string,
              options?: BackupDestinationProcessorOptions
          ) => Promise<StreamingDestinationHandle>)
        | undefined {
        return undefined;
    }
}
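To illustrate the contract, here is a minimal sketch of a non-streaming processor built on the abstract class above. LocalDiskConfig and the 'LOCAL' destination type are assumptions for illustration only; the DestinationType enum in this diff has just RCLONE, and the only real implementation is the RClone processor further below.

// Hypothetical processor sketch; not part of this changeset.
import { cp } from 'fs/promises';

import {
    BackupDestinationConfig,
    BackupDestinationProcessor,
    BackupDestinationResult,
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination-processor.interface.js';
import { DestinationType } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';

interface LocalDiskConfig extends BackupDestinationConfig {
    targetDirectory: string;
}

class LocalDiskDestinationProcessor extends BackupDestinationProcessor<LocalDiskConfig> {
    // 'LOCAL' is a hypothetical enum member, cast only to satisfy the sketch.
    readonly destinationType = 'LOCAL' as unknown as DestinationType;

    async execute(sourcePath: string, config: LocalDiskConfig): Promise<BackupDestinationResult> {
        // Plain recursive copy stands in for a real upload step.
        await cp(sourcePath, config.targetDirectory, { recursive: true });
        return { success: true, destinationPath: config.targetDirectory };
    }

    async validate(config: LocalDiskConfig) {
        return config.targetDirectory
            ? { valid: true }
            : { valid: false, error: 'targetDirectory is required' };
    }

    async cleanup(): Promise<void> {
        // Nothing to roll back for a plain copy in this sketch.
    }

    get supportsStreaming(): boolean {
        return false; // getWritableStream stays undefined from the base class
    }
}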
@@ -0,0 +1,13 @@
import { forwardRef, Module } from '@nestjs/common';

import { BackupDestinationService } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.service.js';
import { RCloneDestinationProcessor } from '@app/unraid-api/graph/resolvers/backup/destination/rclone/rclone-destination-processor.service.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
import { RCloneModule } from '@app/unraid-api/graph/resolvers/rclone/rclone.module.js';

@Module({
    imports: [forwardRef(() => RCloneModule)],
    providers: [RCloneApiService, BackupDestinationService, RCloneDestinationProcessor],
    exports: [BackupDestinationService, RCloneDestinationProcessor],
})
export class BackupDestinationModule {}
@@ -0,0 +1,85 @@
import { BadRequestException, Injectable, Logger } from '@nestjs/common';
import { EventEmitter } from 'events';

import { v4 as uuidv4 } from 'uuid';

import {
    BackupDestinationConfig,
    BackupDestinationProcessor,
    BackupDestinationProcessorOptions,
    BackupDestinationResult,
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination-processor.interface.js';
import { DestinationType } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { RCloneDestinationProcessor } from '@app/unraid-api/graph/resolvers/backup/destination/rclone/rclone-destination-processor.service.js';

export interface BackupDestinationOptions {
    jobId?: string;
    onProgress?: (progress: number) => void;
    onOutput?: (data: string) => void;
    onError?: (error: string) => void;
}

@Injectable()
export class BackupDestinationService extends EventEmitter {
    private readonly logger = new Logger(BackupDestinationService.name);

    constructor(private readonly rcloneDestinationProcessor: RCloneDestinationProcessor) {
        super();
    }

    async processDestination<T extends BackupDestinationConfig & { type: DestinationType }>(
        sourcePath: string,
        config: T,
        options?: BackupDestinationOptions
    ): Promise<BackupDestinationResult> {
        const processor = this.getProcessor(config.type);
        if (!processor) {
            throw new BadRequestException(`Unsupported destination type: ${config.type}`);
        }

        const processorOptions: BackupDestinationProcessorOptions = {
            jobId: options?.jobId || uuidv4(),
            onProgress: options?.onProgress,
            onOutput: options?.onOutput,
            onError: options?.onError,
        };

        try {
            const result = await processor.execute(sourcePath, config, processorOptions);
            this.logger.log(`Destination processing completed for type: ${config.type}`);
            return result;
        } catch (error) {
            this.logger.error(`Destination processing failed for type: ${config.type}`, error);
            throw error;
        }
    }

    async cancelDestinationJob(jobId: string): Promise<boolean> {
        this.logger.log(`Attempting to cancel destination job: ${jobId}`);

        try {
            const result = await this.rcloneDestinationProcessor.execute('', {} as any, { jobId });
            if (result.metadata?.jobId) {
                this.logger.log(`Cancelled destination job: ${jobId}`);
                return true;
            }
        } catch (error) {
            this.logger.warn(`Failed to cancel destination job ${jobId}:`, error);
        }

        return false;
    }

    async cleanup(): Promise<void> {
        this.logger.log('Cleaning up destination service...');
    }

    public getProcessor(type: DestinationType): BackupDestinationProcessor<any> | null {
        switch (type) {
            case DestinationType.RCLONE:
                return this.rcloneDestinationProcessor;
            default:
                return null;
        }
    }
}
@@ -0,0 +1,95 @@
import { createUnionType, Field, InputType, ObjectType, registerEnumType } from '@nestjs/graphql';

import { Type } from 'class-transformer';
import { IsEnum, IsNotEmpty, IsObject, IsOptional, IsString, ValidateNested } from 'class-validator';
import { GraphQLJSON } from 'graphql-scalars';

import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';

export enum DestinationType {
    RCLONE = 'RCLONE',
}

registerEnumType(DestinationType, {
    name: 'DestinationType',
});

export interface StreamingJobInfo {
    jobId: string;
    status: BackupJobStatus;
    progress?: number;
    startTime: Date;
    endTime?: Date;
    error?: string;
}

@ObjectType()
export class RcloneDestinationConfig {
    @Field(() => String)
    type!: 'RCLONE';

    @Field(() => String, { description: 'Remote name from rclone config' })
    remoteName!: string;

    @Field(() => String, { description: 'Destination path on the remote' })
    destinationPath!: string;

    @Field(() => GraphQLJSON, {
        description: 'RClone options (e.g., --transfers, --checkers)',
        nullable: true,
    })
    rcloneOptions?: Record<string, unknown>;

    static isTypeOf(obj: any): obj is RcloneDestinationConfig {
        return (
            obj &&
            obj.type === 'RCLONE' &&
            typeof obj.remoteName === 'string' &&
            typeof obj.destinationPath === 'string'
        );
    }
}

@InputType()
export class RcloneDestinationConfigInput {
    @Field(() => String)
    @IsString()
    @IsNotEmpty()
    remoteName!: string;

    @Field(() => String)
    @IsString()
    @IsNotEmpty()
    destinationPath!: string;

    @Field(() => GraphQLJSON, { nullable: true })
    @IsOptional()
    @IsObject()
    rcloneOptions?: Record<string, unknown>;
}

@InputType()
export class DestinationConfigInput {
    @Field(() => DestinationType, { nullable: false })
    @IsEnum(DestinationType, { message: 'Invalid destination type' })
    type!: DestinationType;

    @Field(() => RcloneDestinationConfigInput, { nullable: true })
    @IsOptional()
    @ValidateNested()
    @Type(() => RcloneDestinationConfigInput)
    rcloneConfig?: RcloneDestinationConfigInput;
}

export const DestinationConfigUnion = createUnionType({
    name: 'DestinationConfigUnion',
    types: () => [RcloneDestinationConfig] as const,
    resolveType(obj: any) {
        if (RcloneDestinationConfig.isTypeOf && RcloneDestinationConfig.isTypeOf(obj)) {
            return RcloneDestinationConfig;
        }
        return null;
    },
});

export const DestinationConfigInputUnion = DestinationConfigInput;
@@ -0,0 +1,357 @@
import { Injectable, Logger } from '@nestjs/common';

import { execa } from 'execa';

import { getBackupJobGroupId } from '@app/unraid-api/graph/resolvers/backup/backup.utils.js';
import {
    BackupDestinationConfig,
    BackupDestinationProcessor,
    BackupDestinationProcessorOptions,
    BackupDestinationResult,
    StreamingDestinationHandle,
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination-processor.interface.js';
import { DestinationType } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.types.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';

export interface RCloneDestinationConfig extends BackupDestinationConfig {
    remoteName: string;
    destinationPath: string;
    transferOptions?: Record<string, unknown>;
    useStreaming?: boolean;
    sourceCommand?: string;
    sourceArgs?: string[];
    sourceType?: SourceType;
}

@Injectable()
export class RCloneDestinationProcessor extends BackupDestinationProcessor<RCloneDestinationConfig> {
    readonly destinationType = DestinationType.RCLONE;
    private readonly logger = new Logger(RCloneDestinationProcessor.name);

    constructor(private readonly rcloneApiService: RCloneApiService) {
        super();
    }

    async execute(
        sourcePath: string,
        config: RCloneDestinationConfig,
        options: BackupDestinationProcessorOptions = {}
    ): Promise<BackupDestinationResult> {
        const { jobId = 'unknown', onProgress, onOutput, onError } = options;

        try {
            this.logger.log(
                `Starting RClone upload job ${jobId} from ${sourcePath} to ${config.remoteName}:${config.destinationPath}`
            );

            return await this.executeRegularBackup(sourcePath, config, options);
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : 'Unknown RClone error';
            this.logger.error(`RClone upload job ${jobId} failed: ${errorMessage}`, error);

            if (onError) {
                onError(errorMessage);
            }

            return {
                success: false,
                error: errorMessage,
                cleanupRequired: config.cleanupOnFailure,
            };
        }
    }

    private async executeRegularBackup(
        sourcePath: string,
        config: RCloneDestinationConfig,
        options: BackupDestinationProcessorOptions
    ): Promise<BackupDestinationResult> {
        const { jobId: backupConfigId, onOutput, onProgress, onError } = options;

        if (!backupConfigId) {
            const errorMsg = 'Backup Configuration ID (jobId) is required to start RClone backup.';
            this.logger.error(errorMsg);
            if (onError) {
                onError(errorMsg);
            }
            return {
                success: false,
                error: errorMsg,
                cleanupRequired: config.cleanupOnFailure,
            };
        }

        await this.rcloneApiService.startBackup({
            srcPath: sourcePath,
            dstPath: `${config.remoteName}:${config.destinationPath}`,
            async: true,
            configId: backupConfigId,
            options: config.transferOptions,
        });

        const groupIdToMonitor = getBackupJobGroupId(backupConfigId);

        if (onOutput) {
            onOutput(
                `RClone backup process initiated for group: ${groupIdToMonitor}. Monitoring progress...`
            );
        }

        let jobStatus = await this.rcloneApiService.getEnhancedJobStatus(
            groupIdToMonitor,
            backupConfigId
        );
        this.logger.debug('Rclone Job Status: %o', jobStatus);
        let retries = 0;
        const effectiveTimeout = config.timeout && config.timeout >= 60000 ? config.timeout : 3600000;
        const maxRetries = Math.floor(effectiveTimeout / 5000);

        while (jobStatus && !jobStatus.finished && retries < maxRetries) {
            await new Promise((resolve) => setTimeout(resolve, 5000));

            try {
                jobStatus = await this.rcloneApiService.getEnhancedJobStatus(
                    groupIdToMonitor,
                    backupConfigId
                );
                if (jobStatus && onProgress && jobStatus.progressPercentage !== undefined) {
                    onProgress(jobStatus.progressPercentage);
                }
                if (jobStatus && onOutput && jobStatus.stats?.speed) {
                    onOutput(`Group ${groupIdToMonitor} - Transfer speed: ${jobStatus.stats.speed} B/s`);
                }
            } catch (pollError: any) {
                this.logger.warn(
                    `[${backupConfigId}] Error polling group status for ${groupIdToMonitor}: ${(pollError as Error).message}`
                );
            }
            retries++;
        }

        if (!jobStatus) {
            const errorMsg = `Failed to get final job status for RClone group ${groupIdToMonitor}`;
            this.logger.error(`[${backupConfigId}] ${errorMsg}`);
            if (onError) {
                onError(errorMsg);
            }
            return {
                success: false,
                error: errorMsg,
                destinationPath: `${config.remoteName}:${config.destinationPath}`,
                cleanupRequired: config.cleanupOnFailure,
            };
        }

        if (jobStatus.finished && jobStatus.success) {
            if (onProgress) {
                onProgress(100);
            }
            if (onOutput) {
                onOutput(`RClone backup for group ${groupIdToMonitor} completed successfully.`);
            }
            return {
                success: true,
                destinationPath: `${config.remoteName}:${config.destinationPath}`,
                metadata: {
                    groupId: groupIdToMonitor,
                    remoteName: config.remoteName,
                    remotePath: config.destinationPath,
                    transferOptions: config.transferOptions,
                    stats: jobStatus.stats,
                },
            };
        } else {
            let errorMsg: string;
            if (!jobStatus.finished && retries >= maxRetries) {
                errorMsg = `RClone group ${groupIdToMonitor} timed out after ${effectiveTimeout / 1000} seconds.`;
                this.logger.error(`[${backupConfigId}] ${errorMsg}`);
            } else {
                errorMsg = jobStatus.error || `RClone group ${groupIdToMonitor} failed.`;
                this.logger.error(`[${backupConfigId}] ${errorMsg}`, jobStatus.stats?.lastError);
            }

            if (onError) {
                onError(errorMsg);
            }
            return {
                success: false,
                error: errorMsg,
                destinationPath: `${config.remoteName}:${config.destinationPath}`,
                metadata: {
                    groupId: groupIdToMonitor,
                    remoteName: config.remoteName,
                    remotePath: config.destinationPath,
                    transferOptions: config.transferOptions,
                    stats: jobStatus.stats,
                },
                cleanupRequired: config.cleanupOnFailure,
            };
        }
    }

    async validate(
        config: RCloneDestinationConfig
    ): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
        const warnings: string[] = [];

        if (!config.remoteName) {
            return { valid: false, error: 'Remote name is required' };
        }

        if (!config.destinationPath) {
            return { valid: false, error: 'Remote path is required' };
        }

        if (config.useStreaming) {
            if (!config.sourceCommand) {
                return { valid: false, error: 'Source command is required for streaming backups' };
            }
            if (!config.sourceArgs || config.sourceArgs.length === 0) {
                return { valid: false, error: 'Source arguments are required for streaming backups' };
            }
        }

        try {
            const remotes = await this.rcloneApiService.listRemotes();
            if (!remotes.includes(config.remoteName)) {
                return { valid: false, error: `Remote '${config.remoteName}' not found` };
            }
        } catch (error) {
            return { valid: false, error: 'Failed to validate remote configuration' };
        }

        if (config.timeout < 60000) {
            warnings.push('Timeout is less than 1 minute, which may be too short for large uploads');
        }

        return { valid: true, warnings };
    }

    async cleanup(result: BackupDestinationResult): Promise<void> {
        if (!result.cleanupRequired || !result.destinationPath) {
            return;
        }

        const idToStop = result.metadata?.groupId || result.metadata?.jobId;

        try {
            this.logger.log(`Cleaning up failed upload at ${result.destinationPath}`);

            if (idToStop) {
                await this.rcloneApiService.stopJob(idToStop as string);
                if (result.metadata?.groupId) {
                    this.logger.log(`Stopped RClone group: ${result.metadata.groupId}`);
                } else if (result.metadata?.jobId) {
                    this.logger.log(
                        `Attempted to stop RClone job: ${result.metadata.jobId} (Note: Group ID preferred for cleanup)`
                    );
                }
            }
        } catch (error) {
            this.logger.warn(
                `Failed to cleanup destination: ${error instanceof Error ? error.message : 'Unknown error'}`
            );
        }
    }

    get supportsStreaming(): boolean {
        return true;
    }

    get getWritableStream(): (
        config: RCloneDestinationConfig,
        jobId: string,
        options?: BackupDestinationProcessorOptions
    ) => Promise<StreamingDestinationHandle> {
        return async (
            config: RCloneDestinationConfig,
            jobId: string,
            options: BackupDestinationProcessorOptions = {}
        ): Promise<StreamingDestinationHandle> => {
            const validation = await this.validate(config);
            if (!validation.valid) {
                const errorMsg = `RClone destination configuration validation failed: ${validation.error}`;
                this.logger.error(`[${jobId}] ${errorMsg}`);
                throw new Error(errorMsg);
            }

            const rcloneDest = `${config.remoteName}:${config.destinationPath}`;
            const rcloneArgs = ['rcat', rcloneDest, '--progress'];

            this.logger.log(
                `[${jobId}] Preparing writable stream for rclone rcat to ${rcloneDest} with progress`
            );

            try {
                const rcloneProcess = execa('rclone', rcloneArgs, {});

                const completionPromise = new Promise<BackupDestinationResult>((resolve, reject) => {
                    let stderrOutput = '';
                    let stdoutOutput = '';

                    rcloneProcess.stderr?.on('data', (data) => {
                        const chunk = data.toString();
                        stderrOutput += chunk;
                        this.logger.verbose(`[${jobId}] rclone rcat stderr: ${chunk.trim()}`);

                        const progressMatch = chunk.match(/(\d+)%/);
                        if (progressMatch && progressMatch[1] && options.onProgress) {
                            const percentage = parseInt(progressMatch[1], 10);
                            if (!isNaN(percentage)) {
                                options.onProgress(percentage);
                            }
                        }
                    });

                    rcloneProcess.stdout?.on('data', (data) => {
                        const chunk = data.toString();
                        stdoutOutput += chunk;
                        this.logger.verbose(`[${jobId}] rclone rcat stdout: ${chunk.trim()}`);
                    });

                    rcloneProcess
                        .then((result) => {
                            this.logger.log(
                                `[${jobId}] rclone rcat to ${rcloneDest} completed successfully.`
                            );
                            resolve({
                                success: true,
                                destinationPath: rcloneDest,
                                metadata: { stdout: stdoutOutput, stderr: stderrOutput },
                            });
                        })
                        .catch((error) => {
                            const errorMessage =
                                error.stderr || error.message || 'rclone rcat command failed';
                            this.logger.error(
                                `[${jobId}] rclone rcat to ${rcloneDest} failed: ${errorMessage}`,
                                error.stack
                            );
                            reject({
                                success: false,
                                error: errorMessage,
                                destinationPath: rcloneDest,
                                metadata: { stdout: stdoutOutput, stderr: stderrOutput },
                            });
                        });
                });

                if (!rcloneProcess.stdin) {
                    const errMsg = 'Failed to get stdin stream from rclone process.';
                    this.logger.error(`[${jobId}] ${errMsg}`);
                    throw new Error(errMsg);
                }

                return {
                    stream: rcloneProcess.stdin,
                    completionPromise,
                };
            } catch (error) {
                const errorMessage = error instanceof Error ? error.message : String(error);
                this.logger.error(`[${jobId}] Failed to start rclone rcat process: ${errorMessage}`);
                throw new Error(`Failed to start rclone rcat process: ${errorMessage}`);
            }
        };
    }
}
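The polling loop in executeRegularBackup above checks the rclone group every 5 seconds, so the retry budget works out to effectiveTimeout / 5000 polls. A sketch of the arithmetic with the fallback timeout from that method:

// Polling budget from executeRegularBackup: one status check per 5000 ms
// until the job finishes or the budget is spent.
const effectiveTimeout = 3600000; // ms fallback when config.timeout < 60000
const pollIntervalMs = 5000;
const maxRetries = Math.floor(effectiveTimeout / pollIntervalMs); // 720 polls ≈ 1 hour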
@@ -0,0 +1,189 @@
|
||||
import type { LabelElement, Layout, SchemaBasedCondition } from '@jsonforms/core';
|
||||
import { JsonSchema7, RuleEffect } from '@jsonforms/core';
|
||||
|
||||
import type { RCloneRemote } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
|
||||
import type { DataSlice, SettingSlice, UIElement } from '@app/unraid-api/types/json-forms.js';
|
||||
import { getDestinationConfigSlice } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination-jsonforms.config.js';
|
||||
import { getSourceConfigSlice } from '@app/unraid-api/graph/resolvers/backup/source/backup-source-jsonforms.config.js';
|
||||
import { createLabeledControl } from '@app/unraid-api/graph/utils/form-utils.js';
|
||||
import { mergeSettingSlices } from '@app/unraid-api/types/json-forms.js';
|
||||
|
||||
function getBasicBackupConfigSlice(): SettingSlice {
|
||||
const basicConfigElements: UIElement[] = [
|
||||
createLabeledControl({
|
||||
scope: '#/properties/name',
|
||||
label: 'Backup Job Name',
|
||||
description: 'A descriptive name for this backup job (e.g., "Weekly Documents Backup")',
|
||||
controlOptions: {
|
||||
placeholder: 'Enter backup job name',
|
||||
format: 'string',
|
||||
},
|
||||
}),
|
||||
|
||||
createLabeledControl({
|
||||
scope: '#/properties/schedule',
|
||||
label: 'Schedule (Cron Expression)',
|
||||
description:
|
||||
'When to run this backup job. Leave empty for manual execution only. Examples: "0 2 * * *" (daily at 2AM), "0 2 * * 0" (weekly on Sunday at 2AM)',
|
||||
controlOptions: {
|
||||
placeholder: 'Leave empty for manual backup',
|
||||
format: 'string',
|
||||
suggestions: [
|
||||
{
|
||||
value: '',
|
||||
label: 'Manual Only',
|
||||
tooltip: 'No automatic schedule - run manually only',
|
||||
},
|
||||
{
|
||||
value: '0 2 * * *',
|
||||
label: 'Daily at 2:00 AM',
|
||||
tooltip: 'Runs every day at 2:00 AM',
|
||||
},
|
||||
{
|
||||
                        value: '0 2 * * 0',
                        label: 'Weekly (Sunday 2:00 AM)',
                        tooltip: 'Runs every Sunday at 2:00 AM',
                    },
                    {
                        value: '0 9 * * 1',
                        label: 'Mondays at 9:00 AM',
                        tooltip: 'Runs every Monday at 9:00 AM',
                    },
                    {
                        value: '0 0 1 * *',
                        label: 'Monthly (1st day at midnight)',
                        tooltip: 'Runs on the 1st day of every month at midnight',
                    },
                    {
                        value: '0 2 1 * *',
                        label: 'Monthly (1st at 2:00 AM)',
                        tooltip: 'Runs on the 1st of every month at 2:00 AM',
                    },
                    {
                        value: '0 2 * * 1-5',
                        label: 'Weekdays at 2:00 AM',
                        tooltip: 'Runs Monday through Friday at 2:00 AM',
                    },
                ],
            },
        }),

        createLabeledControl({
            scope: '#/properties/enabled',
            label: 'Enable Backup Job',
            description: 'Whether this backup job should run automatically according to the schedule',
            controlOptions: {
                toggle: true,
            },
            rule: {
                effect: RuleEffect.SHOW,
                condition: {
                    scope: '#/properties/schedule',
                    schema: {
                        type: 'string',
                        minLength: 1,
                    },
                } as SchemaBasedCondition,
            },
        }),
    ];

    const basicConfigProperties: Record<string, JsonSchema7> = {
        name: {
            type: 'string',
            title: 'Backup Job Name',
            description: 'Human-readable name for this backup job',
            minLength: 1,
            maxLength: 100,
        },
        schedule: {
            type: 'string',
            title: 'Cron Schedule',
            description: 'Cron schedule expression (empty for manual execution)',
        },
        enabled: {
            type: 'boolean',
            title: 'Enabled',
            description: 'Whether this backup job is enabled',
            default: true,
        },
        configStep: {
            type: 'object',
            properties: {
                current: { type: 'integer', default: 0 },
                total: { type: 'integer', default: 3 },
            },
            default: { current: 0, total: 3 },
        },
    };

    const verticalLayoutElement: UIElement = {
        type: 'VerticalLayout',
        elements: basicConfigElements,
        options: { step: 0 },
    };

    return {
        properties: basicConfigProperties as unknown as DataSlice,
        elements: [verticalLayoutElement],
    };
}

export function buildBackupJobConfigSchema({ remotes = [] }: { remotes?: RCloneRemote[] }): {
    dataSchema: { properties: DataSlice; type: 'object' };
    uiSchema: Layout;
} {
    const slicesToMerge: SettingSlice[] = [];

    const basicSlice = getBasicBackupConfigSlice();
    slicesToMerge.push(basicSlice);

    const sourceSlice = getSourceConfigSlice();
    slicesToMerge.push(sourceSlice);

    const destinationSlice = getDestinationConfigSlice({ remotes });
    slicesToMerge.push(destinationSlice);

    const mergedSlices = mergeSettingSlices(slicesToMerge);

    const dataSchema: { properties: DataSlice; type: 'object' } = {
        type: 'object',
        properties: mergedSlices.properties,
    };

    const steps = [
        { label: 'Backup Configuration', description: 'Basic backup job settings' },
        { label: 'Source Configuration', description: 'Configure what to backup' },
        { label: 'Destination Configuration', description: 'Configure where to backup' },
    ];

    const step0Elements = basicSlice.elements;
    const step1Elements = sourceSlice.elements;
    const step2Elements = destinationSlice.elements;

    const steppedLayoutElement: UIElement = {
        type: 'SteppedLayout',
        options: {
            steps: steps,
        },
        elements: [...(step0Elements || []), ...(step1Elements || []), ...(step2Elements || [])].filter(
            (el) => el
        ) as UIElement[],
    };

    const titleLabel: UIElement = {
        type: 'Label',
        text: 'Create Backup Job',
        options: {
            format: 'title',
            description: 'Configure a new scheduled backup job with RClone.',
        },
    };

    const uiSchema: Layout = {
        type: 'VerticalLayout',
        elements: [titleLabel, steppedLayoutElement],
    };

    return { dataSchema, uiSchema };
}
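A minimal consumption sketch for the builder above (the RCloneRemote literal and the call site are illustrative assumptions, not part of this diff):

// Hypothetical call site: feed known rclone remotes into the form builder,
// then hand both schemas to the JSON Forms renderer.
const { dataSchema, uiSchema } = buildBackupJobConfigSchema({
    remotes: [{ name: 'google_drives', type: 'drive' } as RCloneRemote], // shape assumed
});
// dataSchema drives validation; uiSchema drives the stepped layout rendering.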
@@ -0,0 +1,76 @@
import { Field, GraphQLISODateTime, Int, ObjectType, registerEnumType } from '@nestjs/graphql';

import { Node } from '@unraid/shared/graphql.model';

// Moved BackupJobStatus enum here
export enum BackupJobStatus {
    QUEUED = 'Queued',
    RUNNING = 'Running',
    COMPLETED = 'Completed',
    FAILED = 'Failed',
    CANCELLED = 'Cancelled',
}

registerEnumType(BackupJobStatus, {
    name: 'BackupJobStatus',
    description: 'Status of a backup job',
});

@ObjectType({
    implements: () => Node,
})
export class JobStatus extends Node {
    @Field(() => String, { description: 'External job ID from the job execution system' })
    externalJobId!: string;

    @Field()
    name!: string;

    @Field(() => BackupJobStatus)
    status!: BackupJobStatus;

    @Field(() => Int, { description: 'Progress percentage (0-100)' })
    progress!: number;

    @Field({ nullable: true })
    message?: string;

    @Field({ nullable: true })
    error?: string;

    @Field(() => GraphQLISODateTime)
    startTime!: Date;

    @Field(() => GraphQLISODateTime, { nullable: true })
    endTime?: Date;

    @Field(() => Int, { nullable: true, description: 'Bytes transferred' })
    bytesTransferred?: number;

    @Field(() => Int, { nullable: true, description: 'Total bytes to transfer' })
    totalBytes?: number;

    @Field(() => Int, { nullable: true, description: 'Transfer speed in bytes per second' })
    speed?: number;

    @Field(() => Int, { nullable: true, description: 'Elapsed time in seconds' })
    elapsedTime?: number;

    @Field(() => Int, { nullable: true, description: 'Estimated time to completion in seconds' })
    eta?: number;

    @Field(() => String, { nullable: true, description: 'Human-readable bytes transferred' })
    formattedBytesTransferred?: string;

    @Field(() => String, { nullable: true, description: 'Human-readable transfer speed' })
    formattedSpeed?: string;

    @Field(() => String, { nullable: true, description: 'Human-readable elapsed time' })
    formattedElapsedTime?: string;

    @Field(() => String, { nullable: true, description: 'Human-readable ETA' })
    formattedEta?: string;
}

// Use JobStatus as the unified type for both GraphQL and TypeScript
export type JobStatusInfo = JobStatus;
@@ -0,0 +1,30 @@
import { Args, Query, Resolver } from '@nestjs/graphql';

import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';

import { JobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { BackupJobTrackingService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-tracking.service.js';

@Resolver(() => JobStatus)
export class BackupJobStatusResolver {
    constructor(private readonly jobTrackingService: BackupJobTrackingService) {}

    @Query(() => JobStatus, { name: 'backupJobStatus', nullable: true })
    async getJobStatus(
        @Args('jobId', { type: () => PrefixedID }) jobId: string
    ): Promise<JobStatus | null> {
        const jobStatus = this.jobTrackingService.getJobStatus(jobId);
        if (!jobStatus) {
            // Optionally throw NotFoundException or return null based on desired API behavior
            // throw new NotFoundException(`Job with ID ${jobId} not found.`);
            return null;
        }
        return jobStatus as JobStatus; // Map JobStatusInfo to JobStatusGQL if necessary
    }

    @Query(() => [JobStatus], { name: 'allBackupJobStatuses' })
    async getAllJobStatuses(): Promise<JobStatus[]> {
        const statuses = this.jobTrackingService.getAllJobStatuses();
        return statuses as JobStatus[]; // Map JobStatusInfo[] to JobStatusGQL[] if necessary
    }
}
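For reference, a client query against this resolver might look like the following sketch (the operation shape is illustrative; PrefixedID serializes as a string):

// Illustrative query string for the backupJobStatus field above.
const BACKUP_JOB_STATUS_QUERY = /* GraphQL */ `
    query BackupJobStatus($jobId: PrefixedID!) {
        backupJobStatus(jobId: $jobId) {
            id
            name
            status
            progress
            message
        }
    }
`;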
@@ -0,0 +1,119 @@
import { Injectable, Logger } from '@nestjs/common';

import {
    BackupJobStatus,
    JobStatus,
} from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';

@Injectable()
export class BackupJobTrackingService {
    private readonly logger = new Logger(BackupJobTrackingService.name);
    private activeJobs: Map<string, JobStatus> = new Map(); // Maps internal ID -> JobStatus
    private externalJobIndex: Map<string, string> = new Map(); // Maps external ID -> internal ID

    constructor() {
        // Potentially load persisted jobs if needed
    }

    initializeJob(externalJobId: string, jobName: string): JobStatus {
        // Check if external job already exists
        const existingInternalId = this.externalJobIndex.get(externalJobId);
        if (existingInternalId && this.activeJobs.has(existingInternalId)) {
            this.logger.warn(`Job with external ID ${externalJobId} is already initialized.`);
            return this.activeJobs.get(existingInternalId)!;
        }

        const internalId = `job_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
        const newJobStatus: JobStatus = {
            id: internalId,
            externalJobId,
            name: jobName,
            status: BackupJobStatus.QUEUED,
            progress: 0,
            startTime: new Date(),
            message: 'Job initialized.',
        };

        this.activeJobs.set(internalId, newJobStatus);
        this.externalJobIndex.set(externalJobId, internalId);
        this.logger.log(
            `Job initialized: ${jobName} (Internal ID: ${internalId}, External ID: ${externalJobId})`
        );
        return newJobStatus;
    }

    updateJobStatus(
        internalId: string,
        updates: Partial<Omit<JobStatus, 'externalJobId' | 'startTime' | 'name' | 'id'>>
    ): JobStatus | null {
        const job = this.activeJobs.get(internalId);
        if (!job) {
            this.logger.warn(`Cannot update status for unknown internal job ID: ${internalId}`);
            return null;
        }

        const updatedJob = { ...job, ...updates };

        if (
            updates.status === BackupJobStatus.COMPLETED ||
            updates.status === BackupJobStatus.FAILED ||
            updates.status === BackupJobStatus.CANCELLED
        ) {
            updatedJob.endTime = new Date();
            updatedJob.progress = updates.status === BackupJobStatus.COMPLETED ? 100 : job.progress;
        }

        if (updatedJob.progress > 100) {
            updatedJob.progress = 100;
        }

        this.activeJobs.set(internalId, updatedJob);
        this.logger.log(
            `Job status updated for ${job.name} (Internal ID: ${internalId}): Status: ${updatedJob.status}, Progress: ${updatedJob.progress}%`
        );
        return updatedJob;
    }

    // For external systems (like RClone) to update job status
    updateJobStatusByExternalId(
        externalJobId: string,
        updates: Partial<Omit<JobStatus, 'externalJobId' | 'startTime' | 'name' | 'id'>>
    ): JobStatus | null {
        const internalId = this.externalJobIndex.get(externalJobId);
        if (!internalId) {
            this.logger.warn(`Cannot find internal job for external ID: ${externalJobId}`);
            return null;
        }
        return this.updateJobStatus(internalId, updates);
    }

    getJobStatus(internalId: string): JobStatus | undefined {
        return this.activeJobs.get(internalId);
    }

    getJobStatusByExternalId(externalJobId: string): JobStatus | undefined {
        const internalId = this.externalJobIndex.get(externalJobId);
        return internalId ? this.activeJobs.get(internalId) : undefined;
    }

    getAllJobStatuses(): JobStatus[] {
        return Array.from(this.activeJobs.values());
    }

    clearJob(internalId: string): boolean {
        const job = this.activeJobs.get(internalId);
        if (job) {
            this.externalJobIndex.delete(job.externalJobId);
        }
        return this.activeJobs.delete(internalId);
    }

    clearJobByExternalId(externalJobId: string): boolean {
        const internalId = this.externalJobIndex.get(externalJobId);
        if (internalId) {
            this.externalJobIndex.delete(externalJobId);
            return this.activeJobs.delete(internalId);
        }
        return false;
    }
}
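A short usage sketch of the tracking service (IDs are illustrative; the backup config ID doubles as the external job ID, matching how the orchestration service calls initializeJob below):

const status = trackingService.initializeJob('config-123', 'Nightly flash backup');
// Later, progress can be reported by external ID, e.g. from an rclone watcher:
trackingService.updateJobStatusByExternalId('config-123', {
    status: BackupJobStatus.RUNNING,
    progress: 42,
});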
@@ -0,0 +1,534 @@
import { forwardRef, Inject, Injectable, Logger } from '@nestjs/common';
import { Readable } from 'stream';
import { pipeline } from 'stream/promises'; // Using stream.pipeline for better error handling

import { BackupConfigService } from '@app/unraid-api/graph/resolvers/backup/backup-config.service.js';
import { BackupJobConfig } from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import {
    BackupDestinationProcessor,
    BackupDestinationProcessorOptions,
    BackupDestinationResult,
    StreamingDestinationHandle, // Assuming this will be defined in the interface file
} from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination-processor.interface.js';
import { BackupDestinationService } from '@app/unraid-api/graph/resolvers/backup/destination/backup-destination.service.js';
import {
    BackupJobStatus,
    JobStatus,
} from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { BackupJobTrackingService } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-tracking.service.js';
import {
    BackupSourceProcessor,
    BackupSourceProcessorOptions,
    BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { BackupSourceService } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.service.js';

@Injectable()
export class BackupOrchestrationService {
    private readonly logger = new Logger(BackupOrchestrationService.name);

    constructor(
        private readonly jobTrackingService: BackupJobTrackingService,
        private readonly backupSourceService: BackupSourceService,
        private readonly backupDestinationService: BackupDestinationService,
        @Inject(forwardRef(() => BackupConfigService))
        private readonly backupConfigService: BackupConfigService
    ) {}

    async executeBackupJob(jobConfig: BackupJobConfig, configId: string): Promise<string> {
        this.logger.log(
            `Starting orchestration for backup job: ${jobConfig.name} (Config ID: ${configId})`
        );

        // Initialize job in tracking service and get the internal tracking object
        // configId (original jobConfig.id) is used to link tracking to config, jobConfig.name is for display
        const jobStatus = this.jobTrackingService.initializeJob(configId, jobConfig.name);
        const internalJobId = jobStatus.id; // This is the actual ID for this specific job run

        // DO NOT call backupConfigService.updateBackupJobConfig here for currentJobId
        // This will be handled by BackupConfigService itself using the returned internalJobId

        this.emitJobStatus(internalJobId, {
            status: BackupJobStatus.RUNNING,
            progress: 0,
            message: 'Job initializing...',
        });

        const sourceProcessor = this.backupSourceService.getProcessor(jobConfig.sourceType);
        const destinationProcessor = this.backupDestinationService.getProcessor(
            jobConfig.destinationType
        );

        if (!sourceProcessor || !destinationProcessor) {
            const errorMsg = 'Failed to initialize backup processors.';
            this.logger.error(`[Config ID: ${configId}, Job ID: ${internalJobId}] ${errorMsg}`);
            this.emitJobStatus(internalJobId, {
                status: BackupJobStatus.FAILED,
                error: errorMsg,
            });
            // Call handleJobCompletion before throwing
            await this.backupConfigService.handleJobCompletion(
                configId,
                BackupJobStatus.FAILED,
                internalJobId
            );
            throw new Error(errorMsg);
        }

        try {
            if (sourceProcessor.supportsStreaming && destinationProcessor.supportsStreaming) {
                await this.executeStreamingBackup(
                    sourceProcessor,
                    destinationProcessor,
                    jobConfig,
                    internalJobId
                );
            } else {
                await this.executeRegularBackup(
                    sourceProcessor,
                    destinationProcessor,
                    jobConfig,
                    internalJobId,
                    configId // Pass configId for handleJobCompletion
                );
            }
            // If executeStreamingBackup/executeRegularBackup complete without throwing, it implies success for those stages.
            // The final status (COMPLETED/FAILED) is set within those methods via emitJobStatus and then handleJobCompletion.
        } catch (error) {
            // Errors from executeStreamingBackup/executeRegularBackup should have already called handleJobCompletion.
            // This catch is a fallback.
            this.logger.error(
                `[Config ID: ${configId}, Job ID: ${internalJobId}] Orchestration error after backup execution attempt: ${(error as Error).message}`
            );
            // Ensure completion is handled if not already done by the execution methods.
            // This might be redundant if execution methods are guaranteed to call it.
            // However, direct throws before or after calling those methods would be caught here.
            await this.backupConfigService.handleJobCompletion(
                configId,
                BackupJobStatus.FAILED,
                internalJobId
            );
            throw error; // Re-throw the error
        }
        // DO NOT clear currentJobId here using updateBackupJobConfig. It's handled by handleJobCompletion.

        this.logger.log(
            `Finished orchestration logic for backup job: ${jobConfig.name} (Config ID: ${configId}, Job ID: ${internalJobId})`
        );
        return internalJobId; // Return the actual job ID for this run
    }

    private async executeStreamingBackup(
        sourceProcessor: BackupSourceProcessor<any>,
        destinationProcessor: BackupDestinationProcessor<any>,
        jobConfig: BackupJobConfig, // This is the config object, not its ID
        internalJobId: string
    ): Promise<void> {
        this.logger.log(
            `Executing STREAMING backup for job: ${jobConfig.name} (Internal Job ID: ${internalJobId})`
        );
        this.emitJobStatus(internalJobId, {
            status: BackupJobStatus.RUNNING,
            progress: 0,
            message: 'Starting streaming backup...',
        });

        if (!sourceProcessor.getReadableStream || !destinationProcessor.getWritableStream) {
            const errorMsg =
                'Source or destination processor does not support streaming (missing getReadableStream or getWritableStream).';
            this.logger.error(`[${internalJobId}] ${errorMsg}`);
            this.emitJobStatus(internalJobId, { status: BackupJobStatus.FAILED, error: errorMsg });
            // Call handleJobCompletion before throwing
            await this.backupConfigService.handleJobCompletion(
                jobConfig.id,
                BackupJobStatus.FAILED,
                internalJobId
            );
            throw new Error(errorMsg);
        }

        let sourceStream: Readable | null = null;
        let destinationStreamHandle: StreamingDestinationHandle | null = null;

        const processorOptions: BackupDestinationProcessorOptions = {
            jobId: internalJobId,
            onProgress: (progress: number) => {
                this.logger.log(`[${internalJobId}] Destination progress: ${progress}%`);
                this.emitJobStatus(internalJobId, { progress: Math.min(progress, 99) });
            },
            onOutput: (data: string) => {
                this.logger.debug(`[${internalJobId} Dest. Processor Output]: ${data}`);
            },
            onError: (errorMsg: string) => {
                this.logger.warn(`[${internalJobId} Dest. Processor Error]: ${errorMsg}`);
            },
        };

        try {
            this.logger.debug(`[${internalJobId}] Preparing source stream...`);
            sourceStream = await sourceProcessor.getReadableStream(jobConfig.sourceConfig);
            this.logger.debug(
                `[${internalJobId}] Source stream prepared. Preparing destination stream...`
            );
            destinationStreamHandle = await destinationProcessor.getWritableStream(
                jobConfig.destinationConfig,
                internalJobId,
                processorOptions
            );
            this.logger.debug(`[${internalJobId}] Destination stream prepared. Starting stream pipe.`);

            if (!sourceStream || !destinationStreamHandle?.stream) {
                throw new Error('Failed to initialize source or destination stream.');
            }

            let totalBytesProcessed = 0;
            sourceStream.on('data', (chunk) => {
                totalBytesProcessed += chunk.length;
                this.logger.verbose(
                    `[${internalJobId}] Stream data: ${chunk.length} bytes, Total: ${totalBytesProcessed}`
                );
            });

            await pipeline(sourceStream, destinationStreamHandle.stream);

            this.logger.log(
                `[${internalJobId}] Stream piping completed. Waiting for destination processor to finalize...`
            );

            const destinationResult = await destinationStreamHandle.completionPromise;

            if (!destinationResult.success) {
                const errorMsg =
                    destinationResult.error || 'Destination processor failed after streaming.';
                this.logger.error(`[${internalJobId}] ${errorMsg}`);
                this.emitJobStatus(internalJobId, { status: BackupJobStatus.FAILED, error: errorMsg });
                // Call handleJobCompletion before throwing
                await this.backupConfigService.handleJobCompletion(
                    jobConfig.id,
                    BackupJobStatus.FAILED,
                    internalJobId
                );
                throw new Error(errorMsg);
            }

            this.logger.log(
                `Streaming backup job ${jobConfig.name} (Internal ID: ${internalJobId}) completed successfully.`
            );
            this.emitJobStatus(internalJobId, {
                status: BackupJobStatus.COMPLETED,
                progress: 100,
                message: 'Backup completed successfully.',
            });
            // Call handleJobCompletion on success
            await this.backupConfigService.handleJobCompletion(
                jobConfig.id,
                BackupJobStatus.COMPLETED,
                internalJobId
            );

            if (sourceProcessor.cleanup) {
                this.logger.debug(`[${internalJobId}] Performing post-success cleanup for source...`);
                await sourceProcessor.cleanup({
                    success: true,
                    outputPath: 'streamed',
                    cleanupRequired: false,
                } as any);
            }
            if (destinationProcessor.cleanup) {
                this.logger.debug(
                    `[${internalJobId}] Performing post-success cleanup for destination...`
                );
                await destinationProcessor.cleanup({ success: true, cleanupRequired: false });
            }
        } catch (e) {
            const error = e as Error;
            this.logger.error(
                `Streaming backup job ${jobConfig.name} (Internal ID: ${internalJobId}) failed: ${error.message}`,
                error.stack
            );

            this.emitJobStatus(internalJobId, {
                status: BackupJobStatus.FAILED,
                error: error.message,
                message: 'Backup failed during streaming execution.',
            });
            // Call handleJobCompletion on failure
            await this.backupConfigService.handleJobCompletion(
                jobConfig.id,
                BackupJobStatus.FAILED,
                internalJobId
            );

            this.logger.error(
                `[${internalJobId}] Performing cleanup due to failure for job ${jobConfig.name}...`
            );
            try {
                if (sourceProcessor.cleanup) {
                    this.logger.debug(`[${internalJobId}] Cleaning up source processor...`);
                    await sourceProcessor.cleanup({
                        success: false,
                        error: error.message,
                        cleanupRequired: true,
                    } as any);
                }
            } catch (cleanupError) {
                this.logger.error(
                    `[${internalJobId}] Error during source processor cleanup: ${(cleanupError as Error).message}`,
                    (cleanupError as Error).stack
                );
            }

            try {
                if (destinationProcessor.cleanup) {
                    this.logger.debug(`[${internalJobId}] Cleaning up destination processor...`);
                    const destCleanupError =
                        (
                            destinationStreamHandle?.completionPromise &&
                            ((await destinationStreamHandle.completionPromise.catch(
                                (er) => er
                            )) as BackupDestinationResult)
                        )?.error || error.message;
                    await destinationProcessor.cleanup({
                        success: false,
                        error: destCleanupError,
                        cleanupRequired: true,
                    });
                }
            } catch (cleanupError) {
                this.logger.error(
                    `[${internalJobId}] Error during destination processor cleanup: ${(cleanupError as Error).message}`,
                    (cleanupError as Error).stack
                );
            }

            throw error;
        }
    }

    private async executeRegularBackup(
        sourceProcessor: BackupSourceProcessor<any>,
        destinationProcessor: BackupDestinationProcessor<any>,
        jobConfig: BackupJobConfig, // This is the config object, not its ID
        internalJobId: string,
        configId: string // Pass the configId for handleJobCompletion
    ): Promise<void> {
        this.logger.log(
            `Executing REGULAR backup for job: ${jobConfig.name} (Config ID: ${configId}, Internal Job ID: ${internalJobId})`
        );
        this.emitJobStatus(internalJobId, {
            status: BackupJobStatus.RUNNING,
            progress: 0,
            message: 'Starting regular backup...',
        });

        let sourceResult: BackupSourceResult | null = null;
        let destinationResult: BackupDestinationResult | null = null;

        const processorOptions: BackupSourceProcessorOptions & BackupDestinationProcessorOptions = {
            jobId: internalJobId,
            onProgress: (progressUpdate) => {
                const numericProgress =
                    typeof progressUpdate === 'number'
                        ? progressUpdate
                        : (progressUpdate as any).progress;
                if (typeof numericProgress === 'number') {
                    this.emitJobStatus(internalJobId, { progress: numericProgress });
                }
            },
            onOutput: (data: string) => {
                this.logger.debug(`[${internalJobId} Processor Output]: ${data}`);
            },
            onError: (errorMsg: string) => {
                this.logger.warn(`[${internalJobId} Processor Error]: ${errorMsg}`);
            },
        };

        try {
            this.logger.debug(`[${internalJobId}] Executing source processor...`);
            sourceResult = await sourceProcessor.execute(jobConfig.sourceConfig, processorOptions);
            this.logger.debug(
                `[${internalJobId}] Source processor execution completed. Success: ${sourceResult.success}, OutputPath: ${sourceResult.outputPath}`
            );

            if (!sourceResult.success || !sourceResult.outputPath) {
                const errorMsg =
                    sourceResult.error || 'Source processor failed to produce an output path.';
                this.logger.error(`[${internalJobId}] Source processor failed: ${errorMsg}`);
                this.emitJobStatus(internalJobId, {
                    status: BackupJobStatus.FAILED,
                    error: errorMsg,
                    message: 'Source processing failed.',
                });
                this.jobTrackingService.updateJobStatus(internalJobId, {
                    status: BackupJobStatus.FAILED,
                    error: errorMsg,
                });
                // Call handleJobCompletion before throwing
                await this.backupConfigService.handleJobCompletion(
                    configId,
                    BackupJobStatus.FAILED,
                    internalJobId
                );
                throw new Error(errorMsg);
            }
            this.emitJobStatus(internalJobId, {
                progress: 50,
                message: 'Source processing complete. Starting destination processing.',
            });

            this.logger.debug(
                `[${internalJobId}] Executing destination processor with source output: ${sourceResult.outputPath}...`
            );
            destinationResult = await destinationProcessor.execute(
                sourceResult.outputPath,
                jobConfig.destinationConfig,
                processorOptions
            );
            this.logger.debug(
                `[${internalJobId}] Destination processor execution completed. Success: ${destinationResult.success}`
            );

            if (!destinationResult.success) {
                const errorMsg = destinationResult.error || 'Destination processor failed.';
                this.logger.error(`[${internalJobId}] Destination processor failed: ${errorMsg}`);
                this.emitJobStatus(internalJobId, {
                    status: BackupJobStatus.FAILED,
                    error: errorMsg,
                    message: 'Destination processing failed.',
                });
                this.jobTrackingService.updateJobStatus(internalJobId, {
                    status: BackupJobStatus.FAILED,
                    error: errorMsg,
                });
                // Call handleJobCompletion before throwing
                await this.backupConfigService.handleJobCompletion(
                    configId,
                    BackupJobStatus.FAILED,
                    internalJobId
                );
                throw new Error(errorMsg);
            }

            this.logger.log(
                `Regular backup job ${jobConfig.name} (Internal ID: ${internalJobId}) completed successfully.`
            );
            this.emitJobStatus(internalJobId, {
                status: BackupJobStatus.COMPLETED,
                progress: 100,
                message: 'Backup completed successfully.',
            });
            // Call handleJobCompletion on success
            await this.backupConfigService.handleJobCompletion(
                configId,
                BackupJobStatus.COMPLETED,
                internalJobId
            );

            if (sourceResult && sourceProcessor.cleanup) {
                this.logger.debug(
                    `[${internalJobId}] Performing post-success cleanup for source processor...`
                );
                await sourceProcessor.cleanup(sourceResult);
            }
            if (destinationResult && destinationProcessor.cleanup) {
                this.logger.debug(
                    `[${internalJobId}] Performing post-success cleanup for destination processor...`
                );
                await destinationProcessor.cleanup(destinationResult);
            }
        } catch (e) {
            const error = e as Error;
            this.logger.error(
                `Regular backup job ${jobConfig.name} (Internal ID: ${internalJobId}) failed: ${error.message}`,
                error.stack
            );

            this.emitJobStatus(internalJobId, {
                status: BackupJobStatus.FAILED,
                error: error.message,
                message: 'Backup failed during regular execution.',
            });
            this.jobTrackingService.updateJobStatus(internalJobId, {
                status: BackupJobStatus.FAILED,
                error: error.message,
            });
            // Call handleJobCompletion on failure
            await this.backupConfigService.handleJobCompletion(
                configId,
                BackupJobStatus.FAILED,
                internalJobId
            );

            this.logger.error(
                `[${internalJobId}] Performing cleanup due to failure for job ${jobConfig.name}...`
            );
            if (sourceResult && sourceProcessor.cleanup) {
                try {
                    this.logger.debug(
                        `[${internalJobId}] Cleaning up source processor after failure...`
                    );
                    await sourceProcessor.cleanup({
                        ...sourceResult,
                        success: false,
                        error: sourceResult.error || error.message,
                    });
                } catch (cleanupError) {
                    this.logger.error(
                        `[${internalJobId}] Error during source processor cleanup: ${(cleanupError as Error).message}`,
                        (cleanupError as Error).stack
                    );
                }
            }

            if (destinationResult && destinationProcessor.cleanup) {
                try {
                    this.logger.debug(
                        `[${internalJobId}] Cleaning up destination processor after failure...`
                    );
                    await destinationProcessor.cleanup({
                        ...destinationResult,
                        success: false,
                        error: destinationResult.error || error.message,
                    });
                } catch (cleanupError) {
                    this.logger.error(
                        `[${internalJobId}] Error during destination processor cleanup: ${(cleanupError as Error).message}`,
                        (cleanupError as Error).stack
                    );
                }
            } else if (sourceResult?.success && destinationProcessor.cleanup) {
                try {
                    this.logger.debug(
                        `[${internalJobId}] Cleaning up destination processor after a failure (destinationResult not available)...`
                    );
                    await destinationProcessor.cleanup({
                        success: false,
                        error: error.message,
                        cleanupRequired: true,
                    });
                } catch (cleanupError) {
                    this.logger.error(
                        `[${internalJobId}] Error during destination processor cleanup (no result): ${(cleanupError as Error).message}`,
                        (cleanupError as Error).stack
                    );
                }
            }
            throw error;
        }
    }

    private emitJobStatus(
        internalJobId: string,
        statusUpdate: {
            status?: BackupJobStatus;
            progress?: number;
            message?: string;
            error?: string;
        }
    ): void {
        this.logger.log(
            `[Job Status Update - ${internalJobId}]: Status: ${statusUpdate.status}, Progress: ${statusUpdate.progress}, Msg: ${statusUpdate.message}, Err: ${statusUpdate.error}`
        );

        const updatePayload: Partial<Omit<JobStatus, 'externalJobId' | 'startTime' | 'name' | 'id'>> = {
            ...statusUpdate,
        };
        this.jobTrackingService.updateJobStatus(internalJobId, updatePayload);
    }
}
@@ -0,0 +1,503 @@
import type { LabelElement, SchemaBasedCondition } from '@jsonforms/core';
import { JsonSchema7, RuleEffect } from '@jsonforms/core';

import type { DataSlice, SettingSlice, UIElement } from '@app/unraid-api/types/json-forms.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { createLabeledControl } from '@app/unraid-api/graph/utils/form-utils.js';

export function getSourceConfigSlice(): SettingSlice {
    const sourceConfigElements: UIElement[] = [
        {
            type: 'Control',
            scope: '#/properties/sourceConfig/properties/type',
            options: {
                format: 'radio',
                radioLayout: 'horizontal',
                options: [
                    {
                        label: 'ZFS Snapshot',
                        value: SourceType.ZFS,
                        description: 'Create ZFS snapshot and backup',
                    },
                    {
                        label: 'Flash Drive',
                        value: SourceType.FLASH,
                        description: 'Backup flash drive contents',
                    },
                    {
                        label: 'Custom Script',
                        value: SourceType.SCRIPT,
                        description: 'Run custom script to generate backup data',
                    },
                    {
                        label: 'Raw Files',
                        value: SourceType.RAW,
                        description: 'Direct file backup without preprocessing',
                    },
                ],
            },
        },

        createLabeledControl({
            scope: '#/properties/sourceConfig/properties/timeout',
            label: 'Timeout',
            description: 'Timeout in seconds for backup operation',
            controlOptions: {
                placeholder: '3600',
                format: 'number',
            },
        }),

        createLabeledControl({
            scope: '#/properties/sourceConfig/properties/cleanupOnFailure',
            label: 'Cleanup on Failure',
            description: 'Clean up backup artifacts on failure',
            controlOptions: {
                format: 'toggle',
            },
        }),

        // Raw Backup Configuration
        {
            type: 'VerticalLayout',
            rule: {
                effect: RuleEffect.SHOW,
                condition: {
                    scope: '#/properties/sourceConfig/properties/type',
                    schema: { const: SourceType.RAW },
                } as SchemaBasedCondition,
            },
            elements: [
                {
                    type: 'Label',
                    text: 'Raw Backup Configuration',
                    options: {
                        description: 'Configure direct file/folder backup settings.',
                    },
                } as LabelElement,

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/rawConfig/properties/sourcePath',
                    label: 'Source Path',
                    description: 'Source path to backup',
                    controlOptions: {
                        placeholder: '/mnt/user/data',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/rawConfig/properties/excludePatterns',
                    label: 'Exclude Patterns',
                    description: 'Patterns to exclude from backup',
                    controlOptions: {
                        placeholder: '*.tmp,*.log',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/rawConfig/properties/includePatterns',
                    label: 'Include Patterns',
                    description: 'Patterns to include in backup',
                    controlOptions: {
                        placeholder: '*.txt,*.doc',
                    },
                }),
            ],
        },

        // ZFS Configuration
        {
            type: 'VerticalLayout',
            rule: {
                effect: RuleEffect.SHOW,
                condition: {
                    scope: '#/properties/sourceConfig/properties/type',
                    schema: { const: SourceType.ZFS },
                } as SchemaBasedCondition,
            },
            elements: [
                {
                    type: 'Label',
                    text: 'ZFS Configuration',
                    options: {
                        description: 'Configure ZFS snapshot settings for backup.',
                    },
                } as LabelElement,

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/zfsConfig/properties/poolName',
                    label: 'Pool Name',
                    description: 'ZFS pool name',
                    controlOptions: {
                        placeholder: 'tank',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/zfsConfig/properties/datasetName',
                    label: 'Dataset Name',
                    description: 'ZFS dataset name',
                    controlOptions: {
                        placeholder: 'data',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/zfsConfig/properties/snapshotPrefix',
                    label: 'Snapshot Prefix',
                    description: 'Prefix for snapshot names',
                    controlOptions: {
                        placeholder: 'backup',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/zfsConfig/properties/cleanupSnapshots',
                    label: 'Cleanup Snapshots',
                    description: 'Clean up snapshots after backup',
                    controlOptions: {
                        format: 'checkbox',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/zfsConfig/properties/retainSnapshots',
                    label: 'Retain Snapshots',
                    description: 'Number of snapshots to retain',
                    controlOptions: {
                        placeholder: '5',
                        format: 'number',
                    },
                }),
            ],
        },

        // Flash Configuration
        {
            type: 'VerticalLayout',
            rule: {
                effect: RuleEffect.SHOW,
                condition: {
                    scope: '#/properties/sourceConfig/properties/type',
                    schema: { const: SourceType.FLASH },
                } as SchemaBasedCondition,
            },
            elements: [
                {
                    type: 'Label',
                    text: 'Flash Backup Configuration',
                    options: {
                        description: 'Configure Unraid flash drive backup settings.',
                    },
                } as LabelElement,

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/flashConfig/properties/flashPath',
                    label: 'Flash Path',
                    description: 'Path to flash drive',
                    controlOptions: {
                        placeholder: '/boot',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/flashConfig/properties/includeGitHistory',
                    label: 'Include Git History',
                    description: 'Include git history in backup',
                    controlOptions: {
                        format: 'checkbox',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/flashConfig/properties/additionalPaths',
                    label: 'Additional Paths',
                    description: 'Additional paths to include',
                    controlOptions: {
                        placeholder: '/etc/config',
                    },
                }),
            ],
        },

        // Script Configuration
        {
            type: 'VerticalLayout',
            rule: {
                effect: RuleEffect.SHOW,
                condition: {
                    scope: '#/properties/sourceConfig/properties/type',
                    schema: { const: SourceType.SCRIPT },
                } as SchemaBasedCondition,
            },
            elements: [
                {
                    type: 'Label',
                    text: 'Custom Script Configuration',
                    options: {
                        description: 'Configure custom script execution settings.',
                    },
                } as LabelElement,

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/scriptConfig/properties/scriptPath',
                    label: 'Script Path',
                    description: 'Path to script file',
                    controlOptions: {
                        placeholder: '/usr/local/bin/backup.sh',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/scriptConfig/properties/scriptArgs',
                    label: 'Script Arguments',
                    description: 'Arguments for script',
                    controlOptions: {
                        placeholder: '--verbose --compress',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/scriptConfig/properties/workingDirectory',
                    label: 'Working Directory',
                    description: 'Working directory for script',
                    controlOptions: {
                        placeholder: '/tmp',
                    },
                }),

                createLabeledControl({
                    scope: '#/properties/sourceConfig/properties/scriptConfig/properties/outputPath',
                    label: 'Output Path',
                    description: 'Path for script output',
                    controlOptions: {
                        placeholder: '/tmp/backup.tar.gz',
                    },
                }),
            ],
        },
    ];

    const sourceConfigProperties: Record<string, JsonSchema7> = {
        sourceConfig: {
            type: 'object',
            title: 'Source Configuration',
            description: 'Configuration for backup source',
            properties: {
                type: {
                    type: 'string',
                    title: 'Backup Type',
                    description: 'Type of backup to perform',
                    enum: [SourceType.ZFS, SourceType.FLASH, SourceType.SCRIPT, SourceType.RAW],
                    default: SourceType.ZFS,
                },
                timeout: {
                    type: 'integer',
                    title: 'Timeout',
                    description: 'Timeout in seconds for backup operation',
                    minimum: 30,
                    maximum: 86400,
                    default: 3600,
                },
                cleanupOnFailure: {
                    type: 'boolean',
                    title: 'Cleanup on Failure',
                    description: 'Clean up backup artifacts on failure',
                    default: true,
                },
                rawConfig: {
                    type: 'object',
                    title: 'Raw Backup Configuration',
                    properties: {
                        sourcePath: {
                            type: 'string',
                            title: 'Source Path',
                            description: 'Source path to backup',
                            minLength: 1,
                        },
                        excludePatterns: {
                            type: 'array',
                            title: 'Exclude Patterns',
                            description: 'Patterns to exclude from backup',
                            items: {
                                type: 'string',
                            },
                            default: [],
                        },
                        includePatterns: {
                            type: 'array',
                            title: 'Include Patterns',
                            description: 'Patterns to include in backup',
                            items: {
                                type: 'string',
                            },
                            default: [],
                        },
                    },
                    required: ['sourcePath'],
                },
                zfsConfig: {
                    type: 'object',
                    title: 'ZFS Configuration',
                    properties: {
                        poolName: {
                            type: 'string',
                            title: 'Pool Name',
                            description: 'ZFS pool name',
                            minLength: 1,
                        },
                        datasetName: {
                            type: 'string',
                            title: 'Dataset Name',
                            description: 'ZFS dataset name',
                            minLength: 1,
                        },
                        snapshotPrefix: {
                            type: 'string',
                            title: 'Snapshot Prefix',
                            description: 'Prefix for snapshot names',
                            default: 'backup',
                        },
                        cleanupSnapshots: {
                            type: 'boolean',
                            title: 'Cleanup Snapshots',
                            description: 'Clean up snapshots after backup',
                            default: true,
                        },
                        retainSnapshots: {
                            type: 'integer',
                            title: 'Retain Snapshots',
                            description: 'Number of snapshots to retain',
                            minimum: 0,
                            default: 5,
                        },
                    },
                    required: ['poolName', 'datasetName'],
                },
                flashConfig: {
                    type: 'object',
                    title: 'Flash Configuration',
                    properties: {
                        flashPath: {
                            type: 'string',
                            title: 'Flash Path',
                            description: 'Path to flash drive',
                            default: '/boot',
                        },
                        includeGitHistory: {
                            type: 'boolean',
                            title: 'Include Git History',
                            description: 'Include git history in backup',
                            default: true,
                        },
                        additionalPaths: {
                            type: 'array',
                            title: 'Additional Paths',
                            description: 'Additional paths to include',
                            items: {
                                type: 'string',
                            },
                            default: [],
                        },
                    },
                },
                scriptConfig: {
                    type: 'object',
                    title: 'Script Configuration',
                    properties: {
                        scriptPath: {
                            type: 'string',
                            title: 'Script Path',
                            description: 'Path to script file',
                            minLength: 1,
                        },
                        scriptArgs: {
                            type: 'array',
                            title: 'Script Arguments',
                            description: 'Arguments for script',
                            items: {
                                type: 'string',
                            },
                            default: [],
                        },
                        workingDirectory: {
                            type: 'string',
                            title: 'Working Directory',
                            description: 'Working directory for script',
                            default: '/tmp',
                        },
                        outputPath: {
                            type: 'string',
                            title: 'Output Path',
                            description: 'Path for script output',
                            minLength: 1,
                        },
                    },
                    required: ['scriptPath', 'outputPath'],
                },
            },
            required: ['type'],
        },
    };

    // Apply conditional logic for sourceConfig
    if (sourceConfigProperties.sourceConfig && typeof sourceConfigProperties.sourceConfig === 'object') {
        sourceConfigProperties.sourceConfig.allOf = [
            {
                if: { properties: { type: { const: SourceType.RAW } }, required: ['type'] },
                then: {
                    required: ['rawConfig'],
                    properties: {
                        zfsConfig: { not: {} },
                        flashConfig: { not: {} },
                        scriptConfig: { not: {} },
                    },
                },
            },
            {
                if: { properties: { type: { const: SourceType.ZFS } }, required: ['type'] },
                then: {
                    required: ['zfsConfig'],
                    properties: {
                        rawConfig: { not: {} },
                        flashConfig: { not: {} },
                        scriptConfig: { not: {} },
                    },
                },
            },
            {
                if: { properties: { type: { const: SourceType.FLASH } }, required: ['type'] },
                then: {
                    required: ['flashConfig'],
                    properties: {
                        rawConfig: { not: {} },
                        zfsConfig: { not: {} },
                        scriptConfig: { not: {} },
                    },
                },
            },
            {
                if: { properties: { type: { const: SourceType.SCRIPT } }, required: ['type'] },
                then: {
                    required: ['scriptConfig'],
                    properties: {
                        rawConfig: { not: {} },
                        zfsConfig: { not: {} },
                        flashConfig: { not: {} },
                    },
                },
            },
        ];
    }

    const verticalLayoutElement: UIElement = {
        type: 'VerticalLayout',
        elements: sourceConfigElements,
        options: { step: 1 },
    };

    return {
        properties: sourceConfigProperties,
        elements: [verticalLayoutElement],
    };
}
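For clarity, a data instance satisfying the allOf rules above looks like this sketch: the type selects exactly one nested config and the other configs must be absent.

const exampleSourceConfig = {
    type: SourceType.RAW,
    timeout: 3600,
    cleanupOnFailure: true,
    rawConfig: { sourcePath: '/mnt/user/data', excludePatterns: [], includePatterns: [] },
};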
@@ -0,0 +1,53 @@
import { Readable } from 'stream';

import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';

export interface BackupSourceConfig {
    timeout: number;
    cleanupOnFailure: boolean;
}

export interface BackupSourceResult {
    success: boolean;
    outputPath?: string;
    streamPath?: string;
    snapshotName?: string;
    error?: string;
    cleanupRequired?: boolean;
    metadata?: Record<string, unknown>;

    // Streaming support
    streamCommand?: string;
    streamArgs?: string[];
    supportsStreaming?: boolean;
    isStreamingMode?: boolean;
}

export interface BackupSourceProcessorOptions {
    jobId?: string;
    onProgress?: (progress: number) => void;
    onOutput?: (data: string) => void;
    onError?: (error: string) => void;
    useStreaming?: boolean;
}

export abstract class BackupSourceProcessor<TConfig extends BackupSourceConfig> {
    abstract readonly sourceType: SourceType;

    abstract execute(
        config: TConfig,
        options?: BackupSourceProcessorOptions
    ): Promise<BackupSourceResult>;

    abstract validate(config: TConfig): Promise<{ valid: boolean; error?: string; warnings?: string[] }>;

    abstract cleanup(result: BackupSourceResult): Promise<void>;

    // Getter to check if processor supports streaming
    abstract get supportsStreaming(): boolean;

    // Optional getter to get a readable stream for streaming backups
    get getReadableStream(): ((config: TConfig) => Promise<Readable>) | undefined {
        return undefined;
    }
}
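A minimal concrete processor sketched against the abstract class above; the echo behavior and the class name are purely illustrative assumptions.

class EchoSourceProcessor extends BackupSourceProcessor<BackupSourceConfig> {
    readonly sourceType = SourceType.RAW;

    get supportsStreaming(): boolean {
        return true;
    }

    // Expose a trivial readable stream so the orchestrator could pipe from it.
    get getReadableStream(): ((config: BackupSourceConfig) => Promise<Readable>) | undefined {
        return async () => Readable.from(['hello from source']);
    }

    async execute(config: BackupSourceConfig): Promise<BackupSourceResult> {
        return { success: true, outputPath: '/tmp/echo.txt', supportsStreaming: true };
    }

    async validate(
        config: BackupSourceConfig
    ): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
        return { valid: true };
    }

    async cleanup(result: BackupSourceResult): Promise<void> {
        // Nothing to clean up in this sketch.
    }
}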
@@ -0,0 +1,30 @@
import { Module } from '@nestjs/common';

import { BackupSourceService } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.service.js';
import { FlashSourceProcessor } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source-processor.service.js';
import { FlashValidationService } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-validation.service.js';
import { RawSourceProcessor } from '@app/unraid-api/graph/resolvers/backup/source/raw/raw-source-processor.service.js';
import { ScriptSourceProcessor } from '@app/unraid-api/graph/resolvers/backup/source/script/script-source-processor.service.js';
import { ZfsSourceProcessor } from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-source-processor.service.js';
import { ZfsValidationService } from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-validation.service.js';

@Module({
    providers: [
        BackupSourceService,
        FlashSourceProcessor,
        FlashValidationService,
        RawSourceProcessor,
        ScriptSourceProcessor,
        ZfsSourceProcessor,
        ZfsValidationService,
    ],
    exports: [
        BackupSourceService,
        FlashSourceProcessor,
        RawSourceProcessor,
        ScriptSourceProcessor,
        ZfsSourceProcessor,
        ZfsValidationService,
    ],
})
export class BackupSourceModule {}
@@ -0,0 +1,99 @@
import { BadRequestException, Injectable, Logger } from '@nestjs/common';
import { EventEmitter } from 'events';

import { v4 as uuidv4 } from 'uuid';

import {
    BackupSourceConfig,
    BackupSourceProcessor,
    BackupSourceProcessorOptions,
    BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import {
    FlashSourceConfig,
    FlashSourceProcessor,
} from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source-processor.service.js';
import {
    RawSourceConfig,
    RawSourceProcessor,
} from '@app/unraid-api/graph/resolvers/backup/source/raw/raw-source-processor.service.js';
import {
    ScriptSourceConfig,
    ScriptSourceProcessor,
} from '@app/unraid-api/graph/resolvers/backup/source/script/script-source-processor.service.js';
import {
    ZfsSourceConfig,
    ZfsSourceProcessor,
} from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-source-processor.service.js';

export interface BackupSourceOptions {
    jobId?: string;
    onProgress?: (progress: number) => void;
    onOutput?: (data: string) => void;
    onError?: (error: string) => void;
}

@Injectable()
export class BackupSourceService extends EventEmitter {
    private readonly logger = new Logger(BackupSourceService.name);

    constructor(
        private readonly flashSourceProcessor: FlashSourceProcessor,
        private readonly rawSourceProcessor: RawSourceProcessor,
        private readonly scriptSourceProcessor: ScriptSourceProcessor,
        private readonly zfsSourceProcessor: ZfsSourceProcessor
    ) {
        super();
    }

    async processSource<T extends BackupSourceConfig & { type: SourceType }>(
        config: T,
        options?: BackupSourceOptions
    ): Promise<BackupSourceResult> {
        const processor = this.getProcessor(config.type);
        if (!processor) {
            throw new BadRequestException(`Unsupported source type: ${config.type}`);
        }

        const processorOptions: BackupSourceProcessorOptions = {
            jobId: options?.jobId || uuidv4(),
            onProgress: options?.onProgress,
            onOutput: options?.onOutput,
            onError: options?.onError,
        };

        try {
            const result = await processor.execute(config, processorOptions);
            this.logger.log(`Source processing completed for type: ${config.type}`);
            return result;
        } catch (error) {
            this.logger.error(`Source processing failed for type: ${config.type}`, error);
            throw error;
        }
    }

    async cancelSourceJob(jobId: string): Promise<boolean> {
        this.logger.log(`Attempting to cancel source job: ${jobId}`);
        return false;
    }

    async cleanup(): Promise<void> {
        this.logger.log('Cleaning up source service...');
    }

    public getProcessor(type: SourceType): BackupSourceProcessor<any> | null {
        switch (type) {
            case SourceType.FLASH:
                return this.flashSourceProcessor;
            case SourceType.RAW:
                return this.rawSourceProcessor;
            case SourceType.SCRIPT:
                return this.scriptSourceProcessor;
            case SourceType.ZFS:
                return this.zfsSourceProcessor;
            default:
                return null;
        }
    }
}
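Dispatch resolves the right processor by type; a hedged usage sketch, assuming an injected BackupSourceService instance:

const result = await backupSourceService.processSource(
    { type: SourceType.RAW, timeout: 3600, cleanupOnFailure: true },
    { onProgress: (p) => console.log(`source progress: ${p}%`) }
);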
@@ -0,0 +1,136 @@
import { createUnionType, Field, InputType, ObjectType, registerEnumType } from '@nestjs/graphql';

import { Type } from 'class-transformer';
import { IsBoolean, IsEnum, IsNumber, IsOptional, Min, ValidateNested } from 'class-validator';

import {
    FlashPreprocessConfig,
    FlashPreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source.types.js';
import {
    RawBackupConfig,
    RawBackupConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/raw/raw-source.types.js';
import {
    ScriptPreprocessConfig,
    ScriptPreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/script/script-source.types.js';
import {
    ZfsPreprocessConfig,
    ZfsPreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-source.types.js';

export enum SourceType {
    ZFS = 'ZFS',
    FLASH = 'FLASH',
    SCRIPT = 'SCRIPT',
    RAW = 'RAW',
}

registerEnumType(SourceType, {
    name: 'SourceType',
    description:
        'Type of backup to perform (ZFS snapshot, Flash backup, Custom script, or Raw file backup)',
});

export { ZfsPreprocessConfigInput, ZfsPreprocessConfig };
export { FlashPreprocessConfigInput, FlashPreprocessConfig };
export { ScriptPreprocessConfigInput, ScriptPreprocessConfig };
export { RawBackupConfigInput, RawBackupConfig };

@InputType()
export class SourceConfigInput {
    @Field(() => SourceType, { nullable: false })
    @IsEnum(SourceType, { message: 'Invalid source type' })
    type!: SourceType;

    @Field(() => Number, { description: 'Timeout for backup operation in seconds', defaultValue: 3600 })
    @IsOptional()
    @IsNumber()
    @Min(1)
    timeout?: number;

    @Field(() => Boolean, { description: 'Whether to cleanup on failure', defaultValue: true })
    @IsOptional()
    @IsBoolean()
    cleanupOnFailure?: boolean;

    @Field(() => ZfsPreprocessConfigInput, { nullable: true })
    @IsOptional()
    @ValidateNested()
    @Type(() => ZfsPreprocessConfigInput)
    zfsConfig?: ZfsPreprocessConfigInput;

    @Field(() => FlashPreprocessConfigInput, { nullable: true })
    @IsOptional()
    @ValidateNested()
    @Type(() => FlashPreprocessConfigInput)
    flashConfig?: FlashPreprocessConfigInput;

    @Field(() => ScriptPreprocessConfigInput, { nullable: true })
    @IsOptional()
    @ValidateNested()
    @Type(() => ScriptPreprocessConfigInput)
    scriptConfig?: ScriptPreprocessConfigInput;

    @Field(() => RawBackupConfigInput, { nullable: true })
    @IsOptional()
    @ValidateNested()
    @Type(() => RawBackupConfigInput)
    rawConfig?: RawBackupConfigInput;
}

@ObjectType()
export class SourceConfig {
    @Field(() => Number)
    timeout!: number;

    @Field(() => Boolean)
    cleanupOnFailure!: boolean;

    @Field(() => ZfsPreprocessConfig, { nullable: true })
    zfsConfig?: ZfsPreprocessConfig;

    @Field(() => FlashPreprocessConfig, { nullable: true })
    flashConfig?: FlashPreprocessConfig;

    @Field(() => ScriptPreprocessConfig, { nullable: true })
    scriptConfig?: ScriptPreprocessConfig;

    @Field(() => RawBackupConfig, { nullable: true })
    rawConfig?: RawBackupConfig;
}

export const SourceConfigUnion = createUnionType({
    name: 'SourceConfigUnion',
    types: () =>
        [ZfsPreprocessConfig, FlashPreprocessConfig, ScriptPreprocessConfig, RawBackupConfig] as const,
    resolveType(obj: any, context, info) {
        if (ZfsPreprocessConfig.isTypeOf && ZfsPreprocessConfig.isTypeOf(obj)) {
            return ZfsPreprocessConfig;
        }
        if (FlashPreprocessConfig.isTypeOf && FlashPreprocessConfig.isTypeOf(obj)) {
            return FlashPreprocessConfig;
        }
        if (ScriptPreprocessConfig.isTypeOf && ScriptPreprocessConfig.isTypeOf(obj)) {
            return ScriptPreprocessConfig;
        }
        if (RawBackupConfig.isTypeOf && RawBackupConfig.isTypeOf(obj)) {
            return RawBackupConfig;
        }
        console.error(`[SourceConfigUnion] Could not resolve type for object: ${JSON.stringify(obj)}`);
        return null;
    },
});

export const SourceConfigInputUnion = SourceConfigInput;

export interface PreprocessResult {
    success: boolean;
    streamPath?: string;
    outputPath?: string;
    snapshotName?: string;
    error?: string;
    cleanupRequired?: boolean;
    metadata?: Record<string, unknown>;
}
@@ -0,0 +1,18 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';

import { IsOptional, IsString } from 'class-validator';

@InputType()
export abstract class BaseSourceConfigInput {
    @Field(() => String, {
        description: 'Human-readable label for this source configuration',
        nullable: true,
    })
    @IsOptional()
    @IsString()
    label?: string;
}

export interface BaseSourceConfig {
    label: string;
}
@@ -0,0 +1,307 @@
import { Injectable, Logger } from '@nestjs/common';
import { writeFile } from 'fs/promises';
import { join } from 'path';
import { Readable } from 'stream';

import { execa } from 'execa';

import {
    BackupSourceConfig,
    BackupSourceProcessor,
    BackupSourceProcessorOptions,
    BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { FlashPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source.types.js';
import { FlashValidationService } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-validation.service.js';

export interface FlashSourceConfig extends BackupSourceConfig {
    flashPath: string;
    includeGitHistory: boolean;
    additionalPaths?: string[];
}

@Injectable()
export class FlashSourceProcessor extends BackupSourceProcessor<FlashSourceConfig> {
    readonly sourceType = SourceType.FLASH;
    private readonly logger = new Logger(FlashSourceProcessor.name);

    constructor(private readonly flashValidationService: FlashValidationService) {
        super();
    }

    async execute(
        config: FlashSourceConfig,
        options?: BackupSourceProcessorOptions
    ): Promise<BackupSourceResult> {
        const validation = await this.validate(config);
        if (!validation.valid) {
            return {
                success: false,
                error: `Flash configuration validation failed: ${validation.error}`,
                metadata: { validationError: validation.error, validationWarnings: validation.warnings },
            };
        }

        if (validation.warnings?.length) {
            this.logger.warn(`Flash backup warnings: ${validation.warnings.join(', ')}`);
        }

        const tempGitPath = join(config.flashPath, '.git-backup-temp');
        let gitRepoInitialized = false;

        try {
            if (config.includeGitHistory) {
                gitRepoInitialized = await this.initializeGitRepository(config.flashPath, tempGitPath);
                if (gitRepoInitialized) {
                    this.logger.log(`Initialized git repository for Flash backup at: ${tempGitPath}`);
                }
            }

            // Generate streaming command for tar compression
            const streamCommand = this.generateStreamCommand(config, gitRepoInitialized, tempGitPath);

            return {
                success: true,
                outputPath: config.flashPath,
                streamPath: config.flashPath,
                metadata: {
                    flashPath: config.flashPath,
                    gitHistoryIncluded: config.includeGitHistory && gitRepoInitialized,
                    additionalPaths: config.additionalPaths,
                    validationWarnings: validation.warnings,
                    tempGitPath: gitRepoInitialized ? tempGitPath : undefined,
                    streamCommand: streamCommand.command,
                    streamArgs: streamCommand.args,
                    sourceType: this.sourceType,
                },
                cleanupRequired: gitRepoInitialized,
            };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(`Flash backup failed: ${errorMessage}`, error);

            if (gitRepoInitialized) {
                try {
                    await this.cleanupTempGitRepo(tempGitPath);
                    this.logger.log(`Cleaned up temporary git repository after failure: ${tempGitPath}`);
                } catch (cleanupError) {
                    const cleanupErrorMessage =
                        cleanupError instanceof Error ? cleanupError.message : String(cleanupError);
                    this.logger.error(
                        `Failed to cleanup temporary git repository: ${cleanupErrorMessage}`
                    );
                }
            }

            return {
                success: false,
                error: errorMessage,
                cleanupRequired: gitRepoInitialized,
                metadata: {
                    flashPath: config.flashPath,
                    gitRepoInitialized,
                    cleanupAttempted: gitRepoInitialized,
                },
            };
        }
    }

    async validate(
        config: FlashSourceConfig
    ): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
        const legacyConfig: FlashPreprocessConfigInput = {
            flashPath: config.flashPath,
            includeGitHistory: config.includeGitHistory,
            additionalPaths: config.additionalPaths,
        };

        const validationResult = await this.flashValidationService.validateFlashConfig(legacyConfig);

        return {
            valid: validationResult.isValid,
            error: validationResult.errors.length > 0 ? validationResult.errors.join(', ') : undefined,
            warnings: validationResult.warnings,
        };
    }

    async cleanup(result: BackupSourceResult): Promise<void> {
        if (result.cleanupRequired && result.metadata?.tempGitPath) {
            await this.cleanupTempGitRepo(result.metadata.tempGitPath as string);
        }
    }

    private async initializeGitRepository(flashPath: string, tempGitPath: string): Promise<boolean> {
        try {
            const existingGitPath = join(flashPath, '.git');
            const hasExistingRepo = await this.flashValidationService.validateGitRepository(flashPath);

            if (hasExistingRepo) {
                await execa('cp', ['-r', existingGitPath, tempGitPath]);
                this.logger.log('Copied existing git repository to temporary location');
                return true;
            }

            // Initialize the repository in the flash path itself so the
            // `git add`/`git commit` below actually operate on the flash
            // contents, then move the resulting .git directory aside.
            await execa('git', ['init'], { cwd: flashPath });

            const gitignorePath = join(flashPath, '.gitignore');
            const gitignoreContent = [
                '# Exclude sensitive files',
                '*.key',
                '*.pem',
                '*.p12',
                '*.pfx',
                'config/passwd',
                'config/shadow',
                'config/ssh/',
                'config/ssl/',
                'config/wireguard/',
                'config/network.cfg',
                'config/ident.cfg',
            ].join('\n');

            await writeFile(gitignorePath, gitignoreContent);

            await execa('git', ['add', '.'], { cwd: flashPath });
            await execa(
                'git',
                [
                    '-c',
                    'user.name=Unraid Backup',
                    '-c',
                    'user.email=backup@unraid.net',
                    'commit',
                    '-m',
                    'Flash backup snapshot',
                ],
                { cwd: flashPath }
            );

            // Rename the fresh .git directory to the temporary location so the
            // flash root itself is left without a working repository.
            await execa('mv', [join(flashPath, '.git'), tempGitPath]);

            this.logger.log('Initialized new git repository for Flash backup');
            return true;
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.warn(`Failed to initialize git repository: ${errorMessage}`);
            return false;
        }
    }

    private async cleanupTempGitRepo(tempGitPath: string): Promise<void> {
        try {
            await execa('rm', ['-rf', tempGitPath]);
            this.logger.log(`Cleaned up temporary git repository: ${tempGitPath}`);
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(`Failed to cleanup temporary git repository: ${errorMessage}`);
        }
    }

    private generateStreamCommand(
        config: FlashSourceConfig,
        gitRepoInitialized: boolean,
        tempGitPath?: string
    ): { command: string; args: string[] } {
        const excludeArgs: string[] = [];

        // Standard exclusions for flash backups
        const standardExcludes = ['lost+found', '*.tmp', '*.temp', '.DS_Store', 'Thumbs.db'];

        standardExcludes.forEach((pattern) => {
            excludeArgs.push('--exclude', pattern);
        });

        // Exclude the temporary git directory from the tar stream; it is
        // managed (and cleaned up) separately.
        if (gitRepoInitialized && tempGitPath) {
            excludeArgs.push('--exclude', '.git-backup-temp');
        }

        const tarArgs = [
            '-czf', // create, gzip
            '-', // output to stdout for streaming
            '-C', // change to directory
            config.flashPath,
            ...excludeArgs,
            '.', // backup everything in the directory
        ];

        // Add additional paths if specified
        if (config.additionalPaths?.length) {
            config.additionalPaths.forEach((path) => {
                tarArgs.push('-C', path, '.');
            });
        }

        return {
            command: 'tar',
            args: tarArgs,
        };
    }

    get supportsStreaming(): boolean {
        return true;
    }

    get getReadableStream(): (config: FlashSourceConfig) => Promise<Readable> {
        return async (config: FlashSourceConfig): Promise<Readable> => {
            const validation = await this.validate(config);
            if (!validation.valid) {
                const errorMsg = `Flash configuration validation failed: ${validation.error}`;
                this.logger.error(errorMsg);
                const errorStream = new Readable({
                    read() {
                        this.emit('error', new Error(errorMsg));
                        this.push(null);
                    },
                });
                return errorStream;
            }

            const { command, args } = this.generateStreamCommand(config, false);

            this.logger.log(
                `[getReadableStream] Streaming flash backup with command: ${command} ${args.join(' ')}`
            );

            try {
                const tarProcess = execa(command, args, {
                    cwd: config.flashPath,
                });

                tarProcess.catch((error) => {
                    this.logger.error(
                        `Error executing tar command for streaming: ${error.message}`,
                        error.stack
                    );
                });

                if (!tarProcess.stdout) {
                    throw new Error('Failed to get stdout stream from tar process.');
                }

                tarProcess.stdout.on('end', () => {
                    this.logger.log('[getReadableStream] Tar process stdout stream ended.');
                });
                tarProcess.stdout.on('error', (err) => {
                    this.logger.error(
                        `[getReadableStream] Tar process stdout stream error: ${err.message}`
                    );
                });

                return tarProcess.stdout;
            } catch (error) {
                const errorMessage = error instanceof Error ? error.message : String(error);
                this.logger.error(`[getReadableStream] Failed to start tar process: ${errorMessage}`);
                const errorStream = new Readable({
                    read() {
                        this.emit('error', new Error(errorMessage));
                        this.push(null);
                    },
                });
                return errorStream;
            }
        };
    }
}
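For orientation, here is roughly what generateStreamCommand assembles for a plain `/boot` backup with no git history and no additional paths (a sketch derived from the code above; the path is illustrative):

// command: 'tar'
// args: ['-czf', '-', '-C', '/boot',
//        '--exclude', 'lost+found', '--exclude', '*.tmp', '--exclude', '*.temp',
//        '--exclude', '.DS_Store', '--exclude', 'Thumbs.db', '.']
// i.e. a gzip'd tar of the flash root written to stdout; getReadableStream hands
// that stdout pipe back directly, and execa passes the argv without a shell,
// so the glob patterns reach tar unexpanded.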
@@ -0,0 +1,43 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';

import { IsBoolean, IsNotEmpty, IsOptional, IsString } from 'class-validator';

import {
    BaseSourceConfig,
    BaseSourceConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/base-source.types.js';

@InputType()
export class FlashPreprocessConfigInput extends BaseSourceConfigInput {
    @Field(() => String, { description: 'Flash drive mount path', defaultValue: '/boot' })
    @IsString()
    @IsNotEmpty()
    flashPath!: string;

    @Field(() => Boolean, { description: 'Whether to include git history', defaultValue: true })
    @IsBoolean()
    includeGitHistory!: boolean;

    @Field(() => [String], { description: 'Additional paths to include in backup', nullable: true })
    @IsOptional()
    additionalPaths?: string[];
}

@ObjectType()
export class FlashPreprocessConfig implements BaseSourceConfig {
    @Field(() => String, { nullable: false })
    label: string = 'Flash drive backup';

    @Field(() => String)
    flashPath!: string;

    @Field(() => Boolean)
    includeGitHistory!: boolean;

    @Field(() => [String], { nullable: true })
    additionalPaths?: string[];

    static isTypeOf(obj: any): obj is FlashPreprocessConfig {
        return obj && typeof obj.flashPath === 'string' && typeof obj.includeGitHistory === 'boolean';
    }
}
@@ -0,0 +1,260 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, stat } from 'fs/promises';
import { join } from 'path';

import { execa } from 'execa';

import { FlashPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/source/flash/flash-source.types.js';

export interface FlashValidationResult {
    isValid: boolean;
    errors: string[];
    warnings: string[];
    metadata: {
        flashPathExists?: boolean;
        flashPathMounted?: boolean;
        gitRepoExists?: boolean;
        gitRepoSize?: number | null;
        additionalPathsValid?: boolean[];
        totalSize?: number | null;
        availableSpace?: number | null;
    };
}

@Injectable()
export class FlashValidationService {
    private readonly logger = new Logger(FlashValidationService.name);

    async validateFlashConfig(config: FlashPreprocessConfigInput): Promise<FlashValidationResult> {
        const result: FlashValidationResult = {
            isValid: true,
            errors: [],
            warnings: [],
            metadata: {},
        };

        try {
            // Validate flash path exists and is accessible
            const flashPathValid = await this.validateFlashPath(config.flashPath);
            result.metadata.flashPathExists = flashPathValid;

            if (!flashPathValid) {
                result.errors.push(
                    `Flash path '${config.flashPath}' does not exist or is not accessible`
                );
                result.isValid = false;
                return result;
            }

            // Check if flash path is mounted
            const isMounted = await this.isFlashMounted(config.flashPath);
            result.metadata.flashPathMounted = isMounted;

            if (!isMounted) {
                result.warnings.push(`Flash path '${config.flashPath}' may not be properly mounted`);
            }

            // Validate git repository if includeGitHistory is enabled
            if (config.includeGitHistory) {
                const gitRepoExists = await this.validateGitRepository(config.flashPath);
                result.metadata.gitRepoExists = gitRepoExists;

                if (!gitRepoExists) {
                    result.warnings.push(
                        `Git repository not found in '${config.flashPath}'. Git history will be skipped.`
                    );
                } else {
                    const gitRepoSize = await this.getGitRepositorySize(config.flashPath);
                    result.metadata.gitRepoSize = gitRepoSize;

                    if (gitRepoSize && gitRepoSize > 100 * 1024 * 1024) {
                        // 100MB
                        result.warnings.push(
                            `Git repository is large (${Math.round(gitRepoSize / 1024 / 1024)}MB). Backup may take longer.`
                        );
                    }
                }
            }

            // Validate additional paths
            if (config.additionalPaths && config.additionalPaths.length > 0) {
                const pathValidations = await Promise.all(
                    config.additionalPaths.map((path) => this.validateAdditionalPath(path))
                );
                result.metadata.additionalPathsValid = pathValidations;

                const invalidPaths = config.additionalPaths.filter(
                    (_, index) => !pathValidations[index]
                );
                if (invalidPaths.length > 0) {
                    result.warnings.push(
                        `Some additional paths are not accessible: ${invalidPaths.join(', ')}`
                    );
                }
            }

            // Calculate total backup size
            const totalSize = await this.calculateTotalBackupSize(config);
            result.metadata.totalSize = totalSize;

            // Check available space
            const availableSpace = await this.getAvailableSpace(config.flashPath);
            result.metadata.availableSpace = availableSpace;

            if (totalSize && availableSpace && totalSize > availableSpace * 0.8) {
                result.warnings.push(
                    'Backup size may be close to available space. Monitor disk usage during backup.'
                );
            }
        } catch (error: unknown) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            result.errors.push(`Validation failed: ${errorMessage}`);
            result.isValid = false;
        }

        return result;
    }

    async validateFlashPath(flashPath: string): Promise<boolean> {
        try {
            await access(flashPath);
            const stats = await stat(flashPath);
            return stats.isDirectory();
        } catch {
            return false;
        }
    }

    async isFlashMounted(flashPath: string): Promise<boolean> {
        try {
            // Check if the path is a mount point by comparing device IDs
            const pathStat = await stat(flashPath);
            const parentStat = await stat(join(flashPath, '..'));
            return pathStat.dev !== parentStat.dev;
        } catch {
            return false;
        }
    }

    async validateGitRepository(flashPath: string): Promise<boolean> {
        const gitPath = join(flashPath, '.git');
        try {
            await access(gitPath);
            const stats = await stat(gitPath);
            return stats.isDirectory();
        } catch {
            return false;
        }
    }

    async getGitRepositorySize(flashPath: string): Promise<number | null> {
        const gitPath = join(flashPath, '.git');
        try {
            const { stdout } = await execa('du', ['-sb', gitPath]);
            const size = parseInt(stdout.split('\t')[0], 10);
            return isNaN(size) ? null : size;
        } catch {
            return null;
        }
    }

    async validateAdditionalPath(path: string): Promise<boolean> {
        try {
            await access(path);
            return true;
        } catch {
            return false;
        }
    }

    async calculateTotalBackupSize(config: FlashPreprocessConfigInput): Promise<number | null> {
        try {
            let totalSize = 0;

            // Get flash directory size
            const { stdout: flashSize } = await execa('du', ['-sb', config.flashPath]);
            totalSize += parseInt(flashSize.split('\t')[0], 10) || 0;

            // Add additional paths if specified
            if (config.additionalPaths) {
                for (const path of config.additionalPaths) {
                    try {
                        const { stdout: pathSize } = await execa('du', ['-sb', path]);
                        totalSize += parseInt(pathSize.split('\t')[0], 10) || 0;
                    } catch (error: unknown) {
                        this.logger.warn(
                            `Failed to get size for additional path ${path}: ${error instanceof Error ? error.message : String(error)}`
                        );
                    }
                }
            }

            return totalSize;
        } catch {
            return null;
        }
    }

    async getAvailableSpace(path: string): Promise<number | null> {
        try {
            const { stdout } = await execa('df', ['-B1', path]);
            const lines = stdout.split('\n');
            if (lines.length > 1) {
                const fields = lines[1].split(/\s+/);
                if (fields.length >= 4) {
                    const available = parseInt(fields[3], 10);
                    return isNaN(available) ? null : available;
                }
            }
            return null;
        } catch {
            return null;
        }
    }

    async checkGitStatus(flashPath: string): Promise<{
        hasUncommittedChanges: boolean;
        currentBranch: string | null;
        lastCommitHash: string | null;
    }> {
        const result = {
            hasUncommittedChanges: false,
            currentBranch: null as string | null,
            lastCommitHash: null as string | null,
        };

        try {
            // Check for uncommitted changes
            const { stdout: statusOutput } = await execa('git', ['status', '--porcelain'], {
                cwd: flashPath,
            });
            result.hasUncommittedChanges = statusOutput.trim().length > 0;

            // Get current branch
            try {
                const { stdout: branchOutput } = await execa(
                    'git',
                    ['rev-parse', '--abbrev-ref', 'HEAD'],
                    { cwd: flashPath }
                );
                result.currentBranch = branchOutput.trim();
            } catch {
                // Ignore branch detection errors
            }

            // Get last commit hash
            try {
                const { stdout: commitOutput } = await execa('git', ['rev-parse', 'HEAD'], {
                    cwd: flashPath,
                });
                result.lastCommitHash = commitOutput.trim();
            } catch {
                // Ignore commit hash detection errors
            }
        } catch {
            // Git commands failed, repository might not be initialized
        }

        return result;
    }
}
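getAvailableSpace relies on the POSIX df column layout; against typical output the parse maps like this (sample numbers are illustrative, not captured from a real system):

// $ df -B1 /boot
// Filesystem      1B-blocks       Used   Available Use% Mounted on
// /dev/sda1      7969177600 1023410176  6945767424  13% /boot
//
// lines[1].split(/\s+/) -> ['/dev/sda1', '7969177600', '1023410176', '6945767424', '13%', '/boot']
// fields[3]             -> '6945767424', parsed with parseInt into available bytes.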
@@ -0,0 +1,144 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, constants, stat } from 'fs/promises';

import {
    BackupSourceConfig,
    BackupSourceProcessor,
    BackupSourceProcessorOptions,
    BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';

export interface RawSourceConfig extends BackupSourceConfig {
    sourcePath: string;
    excludePatterns?: string[];
    includePatterns?: string[];
}

@Injectable()
export class RawSourceProcessor extends BackupSourceProcessor<RawSourceConfig> {
    readonly sourceType = SourceType.RAW;
    private readonly logger = new Logger(RawSourceProcessor.name);

    get supportsStreaming(): boolean {
        return false;
    }

    async execute(
        config: RawSourceConfig,
        options?: BackupSourceProcessorOptions
    ): Promise<BackupSourceResult> {
        const startTime = Date.now();

        try {
            this.logger.log(`Starting RAW backup validation for path: ${config.sourcePath}`);

            const validation = await this.validate(config);
            if (!validation.valid) {
                return {
                    success: false,
                    error: validation.error || 'Validation failed',
                    metadata: {
                        validationError: validation.error,
                        supportsStreaming: this.supportsStreaming,
                    },
                    supportsStreaming: this.supportsStreaming,
                };
            }

            if (validation.warnings?.length) {
                this.logger.warn(
                    `RAW backup warnings for ${config.sourcePath}: ${validation.warnings.join(', ')}`
                );
            }

            const sourceStats = await stat(config.sourcePath);
            const duration = Date.now() - startTime;

            this.logger.log(`RAW backup: Providing direct path for ${config.sourcePath}`);
            return {
                success: true,
                outputPath: config.sourcePath,
                supportsStreaming: this.supportsStreaming,
                isStreamingMode: false,
                metadata: {
                    sourcePath: config.sourcePath,
                    isDirectory: sourceStats.isDirectory(),
                    size: sourceStats.size,
                    duration,
                    excludePatterns: config.excludePatterns,
                    includePatterns: config.includePatterns,
                    validationWarnings: validation.warnings,
                    supportsStreaming: this.supportsStreaming,
                },
            };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            const errorStack = error instanceof Error ? error.stack : undefined;
            this.logger.error(
                `RAW backup preparation failed for ${config.sourcePath}: ${errorMessage}`,
                errorStack
            );

            return {
                success: false,
                error: errorMessage,
                supportsStreaming: this.supportsStreaming,
                metadata: {
                    sourcePath: config.sourcePath,
                    duration: Date.now() - startTime,
                    supportsStreaming: this.supportsStreaming,
                },
            };
        }
    }

    async validate(
        config: RawSourceConfig
    ): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
        const warnings: string[] = [];

        try {
            await access(config.sourcePath, constants.F_OK | constants.R_OK);
        } catch {
            return {
                valid: false,
                error: `Source path does not exist or is not readable: ${config.sourcePath}`,
            };
        }

        const restrictedPaths = ['/proc', '/sys', '/dev'];
        const isRestricted = restrictedPaths.some((path) => config.sourcePath.startsWith(path));
        if (isRestricted) {
            return {
                valid: false,
                error: `Cannot backup restricted system paths: ${config.sourcePath}`,
            };
        }

        if (config.excludePatterns?.length && config.includePatterns?.length) {
            warnings.push(
                'Both include and exclude patterns specified - exclude patterns take precedence'
            );
        }

        const stats = await stat(config.sourcePath);
        if (stats.isDirectory()) {
            const largeDirPaths = ['/mnt/user', '/mnt/disk'];
            const isLargeDir = largeDirPaths.some((path) => config.sourcePath.startsWith(path));
            if (isLargeDir && !config.excludePatterns?.length && !config.includePatterns?.length) {
                warnings.push(
                    'Backing up large directory without filters may take significant time and space'
                );
            }
        }

        return { valid: true, warnings };
    }

    async cleanup(result: BackupSourceResult): Promise<void> {
        this.logger.log(`RAW backup cleanup completed for: ${result.metadata?.sourcePath}`);
    }
}
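A minimal usage sketch (field values hypothetical; the remaining BackupSourceConfig fields live in the interface file outside this diff, hence the loose cast): the RAW processor never copies data itself, it only validates and hands the path back for the destination stage.

const config = {
    sourcePath: '/mnt/user/appdata',
    excludePatterns: ['*.log'],
} as unknown as RawSourceConfig; // base-config fields omitted for brevity
const result = await new RawSourceProcessor().execute(config);
// result.outputPath === '/mnt/user/appdata' and result.isStreamingMode === false,
// so a destination handler (e.g. the rclone side) reads straight from the source path.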
@@ -0,0 +1,45 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';

import { IsArray, IsNotEmpty, IsOptional, IsString } from 'class-validator';

import {
    BaseSourceConfig,
    BaseSourceConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/base-source.types.js';

@InputType()
export class RawBackupConfigInput extends BaseSourceConfigInput {
    @Field(() => String, { description: 'Source path to backup' })
    @IsString()
    @IsNotEmpty()
    sourcePath!: string;

    @Field(() => [String], { description: 'File patterns to exclude from backup', nullable: true })
    @IsOptional()
    @IsArray()
    excludePatterns?: string[];

    @Field(() => [String], { description: 'File patterns to include in backup', nullable: true })
    @IsOptional()
    @IsArray()
    includePatterns?: string[];
}

@ObjectType()
export class RawBackupConfig implements BaseSourceConfig {
    @Field(() => String, { nullable: false })
    label: string = 'Raw file backup';

    @Field(() => String)
    sourcePath!: string;

    @Field(() => [String], { nullable: true })
    excludePatterns?: string[];

    @Field(() => [String], { nullable: true })
    includePatterns?: string[];

    static isTypeOf(obj: any): obj is RawBackupConfig {
        return obj && typeof obj.sourcePath === 'string';
    }
}
@@ -0,0 +1,252 @@
import { Injectable, Logger } from '@nestjs/common';
import { promises as fs } from 'fs';
import { dirname } from 'path';

import { execa } from 'execa';

import {
    BackupSourceConfig,
    BackupSourceProcessor,
    BackupSourceProcessorOptions,
    BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';

export interface ScriptSourceConfig extends BackupSourceConfig {
    scriptPath: string;
    scriptArgs?: string[];
    workingDirectory?: string;
    environment?: Record<string, string>;
    outputPath: string;
}

@Injectable()
export class ScriptSourceProcessor extends BackupSourceProcessor<ScriptSourceConfig> {
    readonly sourceType = SourceType.SCRIPT;
    private readonly logger = new Logger(ScriptSourceProcessor.name);
    private readonly tempDir = '/tmp/unraid-script-preprocessing';
    private readonly maxOutputSize = 100 * 1024 * 1024; // 100MB limit

    get supportsStreaming(): boolean {
        return false;
    }

    async execute(
        config: ScriptSourceConfig,
        options?: BackupSourceProcessorOptions
    ): Promise<BackupSourceResult> {
        const startTime = Date.now();

        const validation = await this.validate(config);
        if (!validation.valid) {
            return {
                success: false,
                error: `Script configuration validation failed: ${validation.error}`,
                metadata: { validationError: validation.error, validationWarnings: validation.warnings },
            };
        }

        if (validation.warnings?.length) {
            this.logger.warn(`Script backup warnings: ${validation.warnings.join(', ')}`);
        }

        try {
            await this.ensureTempDirectory();

            const { command, args } = this.buildCommand(config);

            this.logger.log(`Executing script: ${command} ${args.join(' ')}`);

            await this.runScriptWithTimeout(command, args, config.timeout / 1000, config.environment);

            const outputSize = await this.getFileSize(config.outputPath);
            if (outputSize === 0) {
                throw new Error('Script produced no output');
            }

            if (outputSize > this.maxOutputSize) {
                throw new Error(
                    `Script output too large: ${outputSize} bytes (max: ${this.maxOutputSize})`
                );
            }

            const duration = Date.now() - startTime;
            this.logger.log(
                `Script completed successfully in ${duration}ms, output size: ${outputSize} bytes`
            );

            return {
                success: true,
                outputPath: config.outputPath,
                metadata: {
                    scriptPath: config.scriptPath,
                    duration,
                    outputSize,
                    workingDirectory: config.workingDirectory,
                    scriptArgs: config.scriptArgs,
                    validationWarnings: validation.warnings,
                },
            };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(`Script backup failed: ${errorMessage}`);

            try {
                await fs.unlink(config.outputPath);
            } catch {
                // Ignore cleanup errors
            }

            return {
                success: false,
                error: errorMessage,
                metadata: {
                    scriptPath: config.scriptPath,
                    duration: Date.now() - startTime,
                    workingDirectory: config.workingDirectory,
                    scriptArgs: config.scriptArgs,
                },
            };
        }
    }

    async validate(
        config: ScriptSourceConfig
    ): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
        try {
            await fs.access(config.scriptPath, fs.constants.F_OK | fs.constants.X_OK);

            const restrictedPaths = ['/boot', '/mnt/user', '/mnt/disk'];
            const isRestricted = restrictedPaths.some((path) => config.scriptPath.startsWith(path));

            if (isRestricted) {
                return {
                    valid: false,
                    error: 'Script cannot be located in restricted paths (/boot, /mnt/user, /mnt/disk*)',
                };
            }

            if (config.workingDirectory) {
                try {
                    await fs.access(config.workingDirectory, fs.constants.F_OK);
                } catch {
                    return {
                        valid: false,
                        error: `Working directory does not exist: ${config.workingDirectory}`,
                    };
                }
            }

            const outputDir = dirname(config.outputPath);
            try {
                await fs.access(outputDir, fs.constants.F_OK | fs.constants.W_OK);
            } catch {
                return {
                    valid: false,
                    error: `Output directory does not exist or is not writable: ${outputDir}`,
                };
            }

            if (config.scriptArgs) {
                for (const arg of config.scriptArgs) {
                    if (arg.length > 1000) {
                        return {
                            valid: false,
                            error: `Script argument too long (max 1000 characters): ${arg.substring(0, 50)}...`,
                        };
                    }
                }
            }

            return { valid: true };
        } catch {
            return {
                valid: false,
                error: `Script does not exist or is not executable: ${config.scriptPath}`,
            };
        }
    }

    async cleanup(result: BackupSourceResult): Promise<void> {
        if (result.outputPath) {
            await this.cleanupFile(result.outputPath);
        }
    }

    private async ensureTempDirectory(): Promise<void> {
        try {
            await fs.access(this.tempDir);
        } catch {
            await fs.mkdir(this.tempDir, { recursive: true, mode: 0o700 });
        }
    }

    private buildCommand(config: ScriptSourceConfig): { command: string; args: string[] } {
        const command = 'timeout';
        const args = [
            `${config.timeout / 1000}s`,
            'nice',
            '-n',
            '10',
            'ionice',
            '-c',
            '3',
            'bash',
            '-c',
            `cd "${config.workingDirectory || '/tmp'}" && exec "${config.scriptPath}" ${(config.scriptArgs || []).join(' ')}`,
        ];

        return { command, args };
    }

    private async runScriptWithTimeout(
        command: string,
        args: string[],
        timeoutSeconds: number,
        environment?: Record<string, string>
    ): Promise<void> {
        try {
            await execa(command, args, {
                timeout: timeoutSeconds * 1000,
                stdio: ['ignore', 'pipe', 'pipe'],
                env: {
                    ...process.env,
                    // Caller-supplied variables are applied before the hardened
                    // PATH so the PATH override always wins.
                    ...environment,
                    PATH: '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
                },
                uid: 99, // nobody user
                gid: 99, // nobody group
            });
        } catch (error: any) {
            if (error.timedOut) {
                throw new Error(`Script timeout after ${timeoutSeconds} seconds`);
            }
            if (error.signal) {
                throw new Error(`Script killed by signal: ${error.signal}`);
            }
            if (error.exitCode !== undefined && error.exitCode !== 0) {
                throw new Error(
                    `Script exited with code ${error.exitCode}. stderr: ${error.stderr || ''}`
                );
            }
            throw new Error(`Failed to execute script: ${error.message}`);
        }
    }

    private async getFileSize(filePath: string): Promise<number> {
        try {
            const stats = await fs.stat(filePath);
            return stats.size;
        } catch {
            return 0;
        }
    }

    private async cleanupFile(filePath: string): Promise<void> {
        try {
            await fs.unlink(filePath);
            this.logger.log(`Cleaned up script output file: ${filePath}`);
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(`Failed to cleanup script output ${filePath}: ${errorMessage}`);
        }
    }
}
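For a sense of the sandboxing, buildCommand with a 300000 ms timeout yields roughly the following invocation (script path and arguments hypothetical):

// timeout 300s nice -n 10 ionice -c 3 bash -c \
//   'cd "/tmp" && exec "/usr/local/bin/export-db.sh" --out /tmp/unraid-script-preprocessing/db.dump'
// The external `timeout` wrapper is doubled by execa's own in-process timeout
// (timeoutSeconds * 1000) in runScriptWithTimeout, and the child runs as
// uid/gid 99 (nobody) with a fixed PATH.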
@@ -0,0 +1,63 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';

import { IsNotEmpty, IsOptional, IsString } from 'class-validator';
import { GraphQLJSON } from 'graphql-scalars';

import {
    BaseSourceConfig,
    BaseSourceConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/base-source.types.js';

@InputType()
export class ScriptPreprocessConfigInput extends BaseSourceConfigInput {
    @Field(() => String, { description: 'Path to the script file' })
    @IsString()
    @IsNotEmpty()
    scriptPath!: string;

    @Field(() => [String], { description: 'Arguments to pass to the script', nullable: true })
    @IsOptional()
    scriptArgs?: string[];

    @Field(() => String, { description: 'Working directory for script execution', nullable: true })
    @IsOptional()
    @IsString()
    workingDirectory?: string;

    @Field(() => GraphQLJSON, {
        description: 'Environment variables for script execution',
        nullable: true,
    })
    @IsOptional()
    environment?: Record<string, string>;

    @Field(() => String, { description: 'Output file path where script should write data' })
    @IsString()
    @IsNotEmpty()
    outputPath!: string;
}

@ObjectType()
export class ScriptPreprocessConfig implements BaseSourceConfig {
    @Field(() => String, { nullable: false })
    label: string = 'Script backup';

    @Field(() => String)
    scriptPath!: string;

    @Field(() => [String], { nullable: true })
    scriptArgs?: string[];

    @Field(() => String, { nullable: true })
    workingDirectory?: string;

    @Field(() => GraphQLJSON, { nullable: true })
    environment?: Record<string, string>;

    @Field(() => String)
    outputPath!: string;

    static isTypeOf(obj: any): obj is ScriptPreprocessConfig {
        return obj && typeof obj.scriptPath === 'string' && typeof obj.outputPath === 'string';
    }
}
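Because `environment` rides over GraphQLJSON, clients pass it as a plain JSON object; a hypothetical input fragment (all values invented for illustration):

// scriptConfig: {
//   scriptPath: "/usr/local/bin/export-db.sh",
//   outputPath: "/tmp/unraid-script-preprocessing/db.dump",
//   scriptArgs: ["--format", "tar"],
//   environment: { PGDATABASE: "appdata", PGUSER: "backup" }
// }
// The validation service that follows checks each key against
// /^[A-Za-z_][A-Za-z0-9_]*$/; PATH-like keys only trigger a warning, since the
// processor overrides PATH at execution time anyway.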
@@ -0,0 +1,285 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, stat } from 'fs/promises';
import { dirname, isAbsolute, resolve } from 'path';

import { ScriptPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/source/script/script-source.types.js';

export interface ScriptValidationResult {
    isValid: boolean;
    errors: string[];
    warnings: string[];
    metadata: {
        scriptExists?: boolean;
        scriptExecutable?: boolean;
        workingDirectoryExists?: boolean;
        outputDirectoryExists?: boolean;
        outputDirectoryWritable?: boolean;
        environmentVariablesValid?: boolean;
        resolvedScriptPath?: string;
        resolvedWorkingDirectory?: string;
        resolvedOutputPath?: string;
    };
}

@Injectable()
export class ScriptValidationService {
    private readonly logger = new Logger(ScriptValidationService.name);

    async validateScriptConfig(config: ScriptPreprocessConfigInput): Promise<ScriptValidationResult> {
        const result: ScriptValidationResult = {
            isValid: true,
            errors: [],
            warnings: [],
            metadata: {},
        };

        try {
            // Resolve and validate script path
            const resolvedScriptPath = this.resolveScriptPath(
                config.scriptPath,
                config.workingDirectory
            );
            result.metadata.resolvedScriptPath = resolvedScriptPath;

            const scriptExists = await this.validateScriptExists(resolvedScriptPath);
            result.metadata.scriptExists = scriptExists;

            if (!scriptExists) {
                result.errors.push(`Script file '${resolvedScriptPath}' does not exist`);
                result.isValid = false;
                return result;
            }

            // Check if script is executable
            const scriptExecutable = await this.validateScriptExecutable(resolvedScriptPath);
            result.metadata.scriptExecutable = scriptExecutable;

            if (!scriptExecutable) {
                result.warnings.push(`Script file '${resolvedScriptPath}' may not be executable`);
            }

            // Validate working directory
            if (config.workingDirectory) {
                const resolvedWorkingDir = resolve(config.workingDirectory);
                result.metadata.resolvedWorkingDirectory = resolvedWorkingDir;

                const workingDirExists = await this.validateDirectory(resolvedWorkingDir);
                result.metadata.workingDirectoryExists = workingDirExists;

                if (!workingDirExists) {
                    result.errors.push(`Working directory '${resolvedWorkingDir}' does not exist`);
                    result.isValid = false;
                }
            }

            // Validate output path and directory
            const resolvedOutputPath = this.resolveOutputPath(
                config.outputPath,
                config.workingDirectory
            );
            result.metadata.resolvedOutputPath = resolvedOutputPath;

            const outputDirectory = dirname(resolvedOutputPath);
            const outputDirExists = await this.validateDirectory(outputDirectory);
            result.metadata.outputDirectoryExists = outputDirExists;

            if (!outputDirExists) {
                result.errors.push(`Output directory '${outputDirectory}' does not exist`);
                result.isValid = false;
            } else {
                // Check if output directory is writable
                const outputDirWritable = await this.validateDirectoryWritable(outputDirectory);
                result.metadata.outputDirectoryWritable = outputDirWritable;

                if (!outputDirWritable) {
                    result.errors.push(`Output directory '${outputDirectory}' is not writable`);
                    result.isValid = false;
                }
            }

            // Validate environment variables
            if (config.environment) {
                const envValid = this.validateEnvironmentVariables(config.environment);
                result.metadata.environmentVariablesValid = envValid;

                if (!envValid) {
                    result.warnings.push('Some environment variables may contain invalid values');
                }
            }

            // Security validations
            this.performSecurityValidations(config, result);
        } catch (error: unknown) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            result.errors.push(`Validation failed: ${errorMessage}`);
            result.isValid = false;
        }

        return result;
    }

    private resolveScriptPath(scriptPath: string, workingDirectory?: string): string {
        if (isAbsolute(scriptPath)) {
            return scriptPath;
        }

        const baseDir = workingDirectory || process.cwd();
        return resolve(baseDir, scriptPath);
    }

    private resolveOutputPath(outputPath: string, workingDirectory?: string): string {
        if (isAbsolute(outputPath)) {
            return outputPath;
        }

        const baseDir = workingDirectory || process.cwd();
        return resolve(baseDir, outputPath);
    }

    async validateScriptExists(scriptPath: string): Promise<boolean> {
        try {
            await access(scriptPath);
            const stats = await stat(scriptPath);
            return stats.isFile();
        } catch {
            return false;
        }
    }

    async validateScriptExecutable(scriptPath: string): Promise<boolean> {
        try {
            const stats = await stat(scriptPath);
            // Check if any execute bit (owner, group, or other) is set
            return (stats.mode & 0o111) !== 0;
        } catch {
            return false;
        }
    }

    async validateDirectory(dirPath: string): Promise<boolean> {
        try {
            await access(dirPath);
            const stats = await stat(dirPath);
            return stats.isDirectory();
        } catch {
            return false;
        }
    }

    async validateDirectoryWritable(dirPath: string): Promise<boolean> {
        try {
            const stats = await stat(dirPath);
            // Check if the owner write bit is set (basic check)
            return (stats.mode & 0o200) !== 0;
        } catch {
            return false;
        }
    }

    validateEnvironmentVariables(environment: Record<string, string>): boolean {
        try {
            // Check for potentially dangerous environment variables
            const dangerousVars = ['PATH', 'LD_LIBRARY_PATH', 'HOME', 'USER'];
            const hasDangerousVars = Object.keys(environment).some((key) =>
                dangerousVars.includes(key.toUpperCase())
            );

            if (hasDangerousVars) {
                this.logger.warn('Script environment contains potentially dangerous variables');
            }

            // Check for valid variable names (basic validation)
            const validVarName = /^[A-Za-z_][A-Za-z0-9_]*$/;
            const invalidVars = Object.keys(environment).filter((key) => !validVarName.test(key));

            if (invalidVars.length > 0) {
                this.logger.warn(`Invalid environment variable names: ${invalidVars.join(', ')}`);
                return false;
            }

            return true;
        } catch {
            return false;
        }
    }

    private performSecurityValidations(
        config: ScriptPreprocessConfigInput,
        result: ScriptValidationResult
    ): void {
        // Check for potentially dangerous script paths
        const dangerousPaths = ['/bin', '/usr/bin', '/sbin', '/usr/sbin'];
        const scriptInDangerousPath = dangerousPaths.some((path) =>
            result.metadata.resolvedScriptPath?.startsWith(path)
        );

        if (scriptInDangerousPath) {
            result.warnings.push(
                'Script is located in a system directory. Ensure it is safe to execute.'
            );
        }

        // Check for dangerous script arguments
        if (config.scriptArgs) {
            const dangerousArgs = config.scriptArgs.filter(
                (arg) =>
                    arg.includes('..') ||
                    arg.includes('rm ') ||
                    arg.includes('sudo ') ||
                    arg.includes('su ')
            );

            if (dangerousArgs.length > 0) {
                result.warnings.push(
                    'Script arguments contain potentially dangerous commands or paths.'
                );
            }
        }

        // Check if output path is in a safe location
        if (result.metadata.resolvedOutputPath) {
            const systemPaths = ['/bin', '/usr', '/etc', '/var', '/sys', '/proc'];
            const outputInSystemPath = systemPaths.some((path) =>
                result.metadata.resolvedOutputPath?.startsWith(path)
            );

            if (outputInSystemPath) {
                result.errors.push('Output path cannot be in system directories for security reasons.');
                result.isValid = false;
            }
        }

        // Validate script file extension for common script types
        if (result.metadata.resolvedScriptPath) {
            const scriptExt = result.metadata.resolvedScriptPath.split('.').pop()?.toLowerCase();
            const allowedExtensions = ['sh', 'bash', 'py', 'pl', 'rb', 'js', 'php'];

            if (scriptExt && !allowedExtensions.includes(scriptExt)) {
                result.warnings.push(
                    `Script extension '.${scriptExt}' is not commonly recognized. Ensure it is executable.`
                );
            }
        }
    }

    async getScriptInfo(scriptPath: string): Promise<{
        size: number | null;
        lastModified: Date | null;
        permissions: string | null;
    }> {
        try {
            const stats = await stat(scriptPath);
            return {
                size: stats.size,
                lastModified: stats.mtime,
                permissions: '0' + (stats.mode & 0o777).toString(8),
            };
        } catch {
            return {
                size: null,
                lastModified: null,
                permissions: null,
            };
        }
    }
}
@@ -0,0 +1,139 @@
import { Injectable, Logger } from '@nestjs/common';

import { execa } from 'execa';

import {
    BackupSourceConfig,
    BackupSourceProcessor,
    BackupSourceProcessorOptions,
    BackupSourceResult,
} from '@app/unraid-api/graph/resolvers/backup/source/backup-source-processor.interface.js';
import { SourceType } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.types.js';
import { ZfsValidationService } from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-validation.service.js';

export interface ZfsSourceConfig extends BackupSourceConfig {
    poolName: string;
    datasetName: string;
    snapshotPrefix?: string;
    cleanupSnapshots: boolean;
    retainSnapshots?: number;
}

@Injectable()
export class ZfsSourceProcessor extends BackupSourceProcessor<ZfsSourceConfig> {
    readonly sourceType = SourceType.ZFS;
    private readonly logger = new Logger(ZfsSourceProcessor.name);

    constructor(private readonly zfsValidationService: ZfsValidationService) {
        super();
    }

    get supportsStreaming(): boolean {
        return true;
    }

    async validate(
        config: ZfsSourceConfig
    ): Promise<{ valid: boolean; error?: string; warnings?: string[] }> {
        try {
            const result = await this.zfsValidationService.validateZfsConfig(config as any);
            return {
                valid: result.isValid,
                error: result.errors.length > 0 ? result.errors.join(', ') : undefined,
                warnings: result.warnings,
            };
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            return { valid: false, error: errorMessage };
        }
    }

    async execute(
        config: ZfsSourceConfig,
        options?: BackupSourceProcessorOptions
    ): Promise<BackupSourceResult> {
        try {
            this.logger.log(`Starting ZFS backup for dataset: ${config.poolName}/${config.datasetName}`);

            const validation = await this.validate(config);
            if (!validation.valid) {
                return {
                    success: false,
                    error: validation.error || 'ZFS validation failed',
                    cleanupRequired: false,
                };
            }

            const snapshotName = await this.createSnapshot(config);
            const snapshotPath = `${config.poolName}/${config.datasetName}@${snapshotName}`;

            this.logger.log(`Created ZFS snapshot: ${snapshotPath}`);

            const result: BackupSourceResult = {
                success: true,
                outputPath: snapshotPath,
                snapshotName,
                cleanupRequired: config.cleanupSnapshots,
                metadata: {
                    poolName: config.poolName,
                    datasetName: config.datasetName,
                    snapshotPath,
                },
            };

            return result;
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            this.logger.error(`ZFS backup failed: ${errorMessage}`, error);

            return {
                success: false,
                error: errorMessage,
                cleanupRequired: false,
            };
        }
    }

    async cleanup(result: BackupSourceResult): Promise<void> {
        if (!result.cleanupRequired || !result.snapshotName) {
            return;
        }

        try {
            const snapshotPath = (result.metadata?.snapshotPath as string) || result.outputPath;
            if (snapshotPath && typeof snapshotPath === 'string') {
                await this.destroySnapshot(snapshotPath);
                this.logger.log(`Cleaned up ZFS snapshot: ${snapshotPath}`);
            }
        } catch (error) {
            this.logger.error(`Failed to cleanup ZFS snapshot: ${error}`);
        }
    }

    private async createSnapshot(config: ZfsSourceConfig): Promise<string> {
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
        const prefix = config.snapshotPrefix || 'backup';
        const snapshotName = `${prefix}-${timestamp}`;
        const snapshotPath = `${config.poolName}/${config.datasetName}@${snapshotName}`;

        const { stdout, stderr } = await execa('zfs', ['snapshot', snapshotPath]);

        if (stderr) {
            this.logger.warn(`ZFS snapshot creation warning: ${stderr}`);
        }

        this.logger.debug(`ZFS snapshot created: ${stdout}`);
        return snapshotName;
    }

    private async destroySnapshot(snapshotPath: string): Promise<void> {
        const { stdout, stderr } = await execa('zfs', ['destroy', snapshotPath]);

        if (stderr) {
            this.logger.warn(`ZFS snapshot destruction warning: ${stderr}`);
        }

        this.logger.debug(`ZFS snapshot destroyed: ${stdout}`);
    }
}
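Snapshot names are purely timestamp-derived, so a run on 2025-05-27 would produce identifiers like these (a sketch of the string transforms above; pool and dataset names illustrative):

// new Date('2025-05-27T15:02:31.655Z').toISOString().replace(/[:.]/g, '-')
//   -> '2025-05-27T15-02-31-655Z'
// snapshotName = 'backup-2025-05-27T15-02-31-655Z'   (default prefix)
// snapshotPath = 'tank/appdata@backup-2025-05-27T15-02-31-655Z'
// cleanup() later runs `zfs destroy` on exactly that path when
// cleanupSnapshots was requested.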
@@ -0,0 +1,64 @@
import { Field, InputType, ObjectType } from '@nestjs/graphql';

import { IsBoolean, IsNotEmpty, IsNumber, IsOptional, IsString, Min } from 'class-validator';

import {
    BaseSourceConfig,
    BaseSourceConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/source/base-source.types.js';

@InputType()
export class ZfsPreprocessConfigInput extends BaseSourceConfigInput {
    @Field(() => String, { description: 'ZFS pool name' })
    @IsString()
    @IsNotEmpty()
    poolName!: string;

    @Field(() => String, { description: 'Dataset name within the pool' })
    @IsString()
    @IsNotEmpty()
    datasetName!: string;

    @Field(() => String, { description: 'Snapshot name prefix', nullable: true })
    @IsOptional()
    @IsString()
    snapshotPrefix?: string;

    @Field(() => Boolean, {
        description: 'Whether to cleanup snapshots after backup',
        defaultValue: true,
    })
    @IsBoolean()
    cleanupSnapshots!: boolean;

    @Field(() => Number, { description: 'Number of snapshots to retain', nullable: true })
    @IsOptional()
    @IsNumber()
    @Min(1)
    retainSnapshots?: number;
}

@ObjectType()
export class ZfsPreprocessConfig implements BaseSourceConfig {
    @Field(() => String, { nullable: false })
    label: string = 'ZFS backup';

    @Field(() => String)
    poolName!: string;

    @Field(() => String)
    datasetName!: string;

    @Field(() => String, { nullable: true })
    snapshotPrefix?: string;

    @Field(() => Boolean)
    cleanupSnapshots!: boolean;

    @Field(() => Number, { nullable: true })
    retainSnapshots?: number;

    static isTypeOf(obj: any): obj is ZfsPreprocessConfig {
        return obj && typeof obj.poolName === 'string' && typeof obj.datasetName === 'string';
    }
}
@@ -0,0 +1,245 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, constants } from 'fs/promises';

import { execa } from 'execa';

import { ZfsPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/source/zfs/zfs-source.types.js';

export interface ZfsValidationResult {
    isValid: boolean;
    errors: string[];
    warnings: string[];
    metadata: {
        poolExists?: boolean;
        datasetExists?: boolean;
        datasetSize?: number;
        availableSpace?: number;
        mountpoint?: string;
    };
}

@Injectable()
export class ZfsValidationService {
    private readonly logger = new Logger(ZfsValidationService.name);

    async validateZfsConfig(config: ZfsPreprocessConfigInput): Promise<ZfsValidationResult> {
        const result: ZfsValidationResult = {
            isValid: true,
            errors: [],
            warnings: [],
            metadata: {},
        };

        try {
            // Validate pool exists
            const poolExists = await this.validatePool(config.poolName);
            result.metadata.poolExists = poolExists;

            if (!poolExists) {
                result.errors.push(`ZFS pool '${config.poolName}' does not exist`);
                result.isValid = false;
                return result;
            }

            // Validate dataset exists
            const datasetExists = await this.validateDataset(config.poolName, config.datasetName);
            result.metadata.datasetExists = datasetExists;

            if (!datasetExists) {
                result.errors.push(
                    `ZFS dataset '${config.poolName}/${config.datasetName}' does not exist`
                );
                result.isValid = false;
                return result;
            }

            // Get dataset information
            const datasetInfo = await this.getDatasetInfo(config.poolName, config.datasetName);
            result.metadata = { ...result.metadata, ...datasetInfo };

            // Validate dataset is mounted
            if (!datasetInfo.mountpoint || datasetInfo.mountpoint === 'none') {
                result.warnings.push(
                    `Dataset '${config.poolName}/${config.datasetName}' is not mounted`
                );
            }

            // Check available space for snapshots
            if (datasetInfo.availableSpace && datasetInfo.datasetSize) {
                const spaceRatio = datasetInfo.availableSpace / datasetInfo.datasetSize;
                if (spaceRatio < 0.1) {
                    result.warnings.push(
                        'Low available space for snapshot creation (less than 10% of dataset size)'
                    );
                }
            }

            // Validate snapshot retention settings
            if (config.retainSnapshots && config.retainSnapshots < 1) {
                result.errors.push('Retain snapshots must be at least 1');
                result.isValid = false;
            }

            // Check for existing snapshots if cleanup is disabled
            if (!config.cleanupSnapshots) {
                const existingSnapshots = await this.getExistingSnapshots(
                    config.poolName,
                    config.datasetName,
                    config.snapshotPrefix
                );
                if (existingSnapshots.length > 10) {
                    result.warnings.push(
                        `Found ${existingSnapshots.length} existing snapshots. Consider enabling cleanup.`
                    );
                }
            }
        } catch (error: unknown) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            result.errors.push(`Validation failed: ${errorMessage}`);
            result.isValid = false;
        }

        return result;
    }

    async validatePool(poolName: string): Promise<boolean> {
        try {
            await execa('zpool', ['list', '-H', '-o', 'name', poolName]);
            return true;
        } catch {
            return false;
        }
    }

    async validateDataset(poolName: string, datasetName: string): Promise<boolean> {
        const fullPath = `${poolName}/${datasetName}`;
        try {
            await execa('zfs', ['list', '-H', '-o', 'name', fullPath]);
            return true;
        } catch {
            return false;
        }
    }

    async getDatasetInfo(
        poolName: string,
        datasetName: string
    ): Promise<{
        datasetSize?: number;
        availableSpace?: number;
        mountpoint?: string;
    }> {
        const fullPath = `${poolName}/${datasetName}`;
        const result: { datasetSize?: number; availableSpace?: number; mountpoint?: string } = {};

        try {
            // Get dataset size
            const { stdout: sizeOutput } = await execa('zfs', [
                'list',
                '-H',
                '-p',
                '-o',
                'used',
                fullPath,
            ]);
            const size = parseInt(sizeOutput.trim(), 10);
            if (!isNaN(size)) {
                result.datasetSize = size;
            }
        } catch (error: unknown) {
            this.logger.warn(
                `Failed to get dataset size: ${error instanceof Error ? error.message : String(error)}`
            );
        }

        try {
            // Get available space
            const { stdout: availOutput } = await execa('zfs', [
                'list',
                '-H',
                '-p',
                '-o',
                'avail',
                fullPath,
            ]);
            const avail = parseInt(availOutput.trim(), 10);
            if (!isNaN(avail)) {
                result.availableSpace = avail;
            }
        } catch (error: unknown) {
            this.logger.warn(
                `Failed to get available space: ${error instanceof Error ? error.message : String(error)}`
            );
        }

        try {
            // Get mountpoint
            const { stdout: mountOutput } = await execa('zfs', [
                'list',
                '-H',
                '-o',
                'mountpoint',
                fullPath,
            ]);
            result.mountpoint = mountOutput.trim();
        } catch (error: unknown) {
            this.logger.warn(
                `Failed to get mountpoint: ${error instanceof Error ? error.message : String(error)}`
            );
        }

        return result;
    }

    async getExistingSnapshots(
        poolName: string,
        datasetName: string,
        prefix?: string
    ): Promise<string[]> {
        const fullPath = `${poolName}/${datasetName}`;

        try {
            const { stdout } = await execa('zfs', [
                'list',
                '-H',
                '-t',
                'snapshot',
                '-o',
                'name',
                '-r',
                fullPath,
            ]);
            const snapshots = stdout.split('\n').filter((line) => line.trim());

            if (prefix) {
                const prefixPattern = `${fullPath}@${prefix}`;
                return snapshots.filter((snapshot) => snapshot.startsWith(prefixPattern));
            }

            return snapshots.filter((snapshot) => snapshot.startsWith(`${fullPath}@`));
        } catch {
            return [];
        }
    }

    async getPoolHealth(poolName: string): Promise<string | null> {
        try {
            const { stdout } = await execa('zpool', ['list', '-H', '-o', 'health', poolName]);
            return stdout.trim();
        } catch {
            return null;
        }
    }

    async canCreateSnapshot(poolName: string, datasetName: string): Promise<boolean> {
        // Check if we have write permissions and the dataset is not readonly
        const fullPath = `${poolName}/${datasetName}`;

        try {
            const { stdout } = await execa('zfs', ['get', '-H', '-o', 'value', 'readonly', fullPath]);
            return stdout.trim() === 'off';
        } catch {
            return false;
        }
    }
}
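The validator above shells out to the standard `zpool`/`zfs` CLI tools, so a caller only needs a config object and the injected service. A minimal preflight sketch built on it follows; the pool and dataset names are assumed examples, the import path is a guess at the new file's name, and the cast is needed because the full `ZfsPreprocessConfigInput` shape is defined elsewhere in this PR:

// Hypothetical caller — not part of the diff. Pool/dataset names and the
// service's file path are assumptions for illustration only.
import { ZfsValidationService } from './zfs-validation.service.js';

async function preflightZfsBackup(validator: ZfsValidationService): Promise<void> {
    const result = await validator.validateZfsConfig({
        poolName: 'tank', // assumed example pool
        datasetName: 'appdata', // assumed example dataset
        snapshotPrefix: 'unraid-backup',
        cleanupSnapshots: true,
        retainSnapshots: 3,
    } as any); // cast: the real input type lives in zfs-source.types.ts

    if (!result.isValid) {
        throw new Error(`ZFS preflight failed: ${result.errors.join('; ')}`);
    }
    result.warnings.forEach((warning) => console.warn(warning));
}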
@@ -19,6 +19,11 @@ export class DockerMutations {}
@ObjectType()
export class VmMutations {}

@ObjectType({
    description: 'Backup related mutations',
})
export class BackupMutations {}

@ObjectType({
    description: 'API Key related mutations',
})
@@ -51,6 +56,9 @@ export class RootMutations {
    @Field(() => VmMutations, { description: 'VM related mutations' })
    vm: VmMutations = new VmMutations();

    @Field(() => BackupMutations, { description: 'Backup related mutations' })
    backup: BackupMutations = new BackupMutations();

    @Field(() => ApiKeyMutations, { description: 'API Key related mutations' })
    apiKey: ApiKeyMutations = new ApiKeyMutations();
@@ -3,6 +3,7 @@ import { Mutation, Resolver } from '@nestjs/graphql';
import {
    ApiKeyMutations,
    ArrayMutations,
    BackupMutations,
    DockerMutations,
    ParityCheckMutations,
    RCloneMutations,
@@ -27,6 +28,11 @@ export class RootMutationsResolver {
        return new VmMutations();
    }

    @Mutation(() => BackupMutations, { name: 'backup' })
    backup(): BackupMutations {
        return new BackupMutations();
    }

    @Mutation(() => ParityCheckMutations, { name: 'parityCheck' })
    parityCheck(): ParityCheckMutations {
        return new ParityCheckMutations();
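For context, the empty `BackupMutations` object returned here is just a namespace: in NestJS code-first GraphQL, the actual sub-mutations attach to it through a separate field resolver. A hedged sketch of that wiring follows; the field name `startJob` and the import path are hypothetical, since the real sub-fields are defined by the backup resolvers elsewhere in this PR:

// Illustrative only — field name and import path are assumptions.
import { Args, Parent, ResolveField, Resolver } from '@nestjs/graphql';

import { BackupMutations } from '@app/unraid-api/graph/resolvers/mutation/mutation.model.js';

@Resolver(() => BackupMutations)
export class BackupMutationsResolver {
    @ResolveField(() => Boolean, { description: 'Hypothetical example sub-mutation' })
    async startJob(@Parent() _parent: BackupMutations, @Args('id') id: string): Promise<boolean> {
        // delegate to a backup service here
        return true;
    }
}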
api/src/unraid-api/graph/resolvers/rclone/Remote Control _ API.html (new file, 2626 lines)
File diff suppressed because it is too large
@@ -5,18 +5,27 @@ import { existsSync } from 'node:fs';
import { mkdir, rm, writeFile } from 'node:fs/promises';
import { dirname, join } from 'node:path';

import { convert } from 'convert';
import { execa } from 'execa';
import got, { HTTPError } from 'got';
import pRetry from 'p-retry';

import { sanitizeParams } from '@app/core/log.js';
import {
    getConfigIdFromGroupId,
    isBackupJobGroup,
} from '@app/unraid-api/graph/resolvers/backup/backup.utils.js';
import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js';
import {
    CreateRCloneRemoteDto,
    DeleteRCloneRemoteDto,
    GetRCloneJobStatusDto,
    GetRCloneRemoteConfigDto,
    GetRCloneRemoteDetailsDto,
    RCloneProviderOptionResponse,
    RCloneJob,
    RCloneJobListResponse,
    RCloneJobStats,
    RCloneProviderResponse,
    RCloneRemoteConfig,
    RCloneStartBackupInput,
@@ -24,72 +33,109 @@ import {
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { validateObject } from '@app/unraid-api/graph/resolvers/validation.utils.js';

// Constants for the service
const CONSTANTS = {
    LOG_LEVEL: {
        DEBUG: 'DEBUG',
        INFO: 'INFO',
    },
    RETRY_CONFIG: {
        retries: 6,
        minTimeout: 100,
        maxTimeout: 5000,
        factor: 2,
        maxRetryTime: 30000,
    },
    TIMEOUTS: {
        GRACEFUL_SHUTDOWN: 2000,
        PROCESS_CLEANUP: 1000,
    },
};

// Internal interface for job status response from RClone API
interface RCloneJobStatusResponse {
    id?: string | number;
    group?: string;
    stats?: RCloneJobStats;
    finished?: boolean;
    error?: string;
    [key: string]: any;
}

interface BackupStatusResult {
    isRunning: boolean;
    stats: RCloneJobStats | null;
    jobCount: number;
    activeJobs: RCloneJobStatusResponse[];
}

interface JobOperationResult {
    stopped: string[];
    forgotten?: string[];
    errors: string[];
}

@Injectable()
export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
    private isInitialized: boolean = false;
    private initialized: boolean = false;
    private readonly logger = new Logger(RCloneApiService.name);
    private rcloneSocketPath: string = '';
    private rcloneBaseUrl: string = '';
    private rcloneProcess: ChildProcess | null = null;
    private readonly rcloneUsername: string =
        process.env.RCLONE_USERNAME || crypto.randomBytes(12).toString('base64');
        process.env.RCLONE_USERNAME ||
        (process.env.NODE_ENV === 'test' ? 'test-user' : crypto.randomBytes(12).toString('hex'));
    private readonly rclonePassword: string =
        process.env.RCLONE_PASSWORD || crypto.randomBytes(24).toString('base64');
    constructor() {}
        process.env.RCLONE_PASSWORD ||
        (process.env.NODE_ENV === 'test' ? 'test-pass' : crypto.randomBytes(24).toString('hex'));

    /**
     * Returns whether the RClone service is initialized and ready to use
     */
    get initialized(): boolean {
        return this.isInitialized;
    constructor(private readonly statusService: RCloneStatusService) {}

    get isInitialized(): boolean {
        return this.initialized;
    }

    async onModuleInit(): Promise<void> {
        try {
            // Check if rclone binary is available first
            const isBinaryAvailable = await this.checkRcloneBinaryExists();
            if (!isBinaryAvailable) {
                this.logger.warn('RClone binary not found on system, skipping initialization');
                this.isInitialized = false;
                return;
            }
        // Check if rclone binary is available first
        const isBinaryAvailable = await this.checkRcloneBinaryExists();
        if (!isBinaryAvailable) {
            this.logger.warn('RClone binary not found on system, skipping initialization');
            this.initialized = false;
            return;
        }

            const { getters } = await import('@app/store/index.js');
            // Check if Rclone Socket is running, if not, start it.
            this.rcloneSocketPath = getters.paths()['rclone-socket'];
            const logFilePath = join(getters.paths()['log-base'], 'rclone-unraid-api.log');
            this.logger.log(`RClone socket path: ${this.rcloneSocketPath}`);
            this.logger.log(`RClone log file path: ${logFilePath}`);
        const { getters } = await import('@app/store/index.js');
        // Check if Rclone Socket is running, if not, start it.
        this.rcloneSocketPath = getters.paths()['rclone-socket'];
        const logFilePath = join(getters.paths()['log-base'], 'rclone-unraid-api.log');
        this.logger.log(`RClone socket path: ${this.rcloneSocketPath}`);
        this.logger.log(`RClone log file path: ${logFilePath}`);

            // Format the base URL for Unix socket
            this.rcloneBaseUrl = `http://unix:${this.rcloneSocketPath}:`;
        // Format the base URL for Unix socket
        this.rcloneBaseUrl = `http://unix:${this.rcloneSocketPath}:`;

            // Check if the RClone socket exists, if not, create it.
            const socketExists = await this.checkRcloneSocketExists(this.rcloneSocketPath);
        // Check if the RClone socket exists, if not, create it.
        const socketExists = await this.checkRcloneSocketExists(this.rcloneSocketPath);

            if (socketExists) {
                const isRunning = await this.checkRcloneSocketRunning();
                if (isRunning) {
                    this.isInitialized = true;
                    return;
                } else {
                    this.logger.warn(
                        'RClone socket is not running but socket exists, removing socket before starting...'
                    );
                    await rm(this.rcloneSocketPath, { force: true });
                }

                this.logger.warn('RClone socket is not running, starting it...');
                this.isInitialized = await this.startRcloneSocket(this.rcloneSocketPath, logFilePath);
        if (socketExists) {
            const isRunning = await this.checkRcloneSocketRunning();
            if (isRunning) {
                this.initialized = true;
                return;
            } else {
                this.logger.warn('RClone socket does not exist, creating it...');
                this.isInitialized = await this.startRcloneSocket(this.rcloneSocketPath, logFilePath);
                return;
                this.logger.warn(
                    'RClone socket is not running but socket exists, removing socket before starting...'
                );
                await rm(this.rcloneSocketPath, { force: true });
            }
        } catch (error: unknown) {
            this.logger.error(`Error initializing RCloneApiService: ${error}`);
            this.isInitialized = false;

            this.logger.warn('RClone socket is not running, starting it...');
            this.initialized = await this.startRcloneSocket(this.rcloneSocketPath, logFilePath);
            return;
        } else {
            this.logger.warn('RClone socket does not exist, creating it...');
            this.initialized = await this.startRcloneSocket(this.rcloneSocketPath, logFilePath);
            return;
        }
    }
@@ -98,95 +144,145 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
        this.logger.log('RCloneApiService module destroyed');
    }

    /**
     * Starts the RClone RC daemon on the specified socket path
     */
    private async initializeRCloneService(): Promise<void> {
        const { getters } = await import('@app/store/index.js');
        this.rcloneSocketPath = getters.paths()['rclone-socket'];
        const logFilePath = join(getters.paths()['log-base'], 'rclone-unraid-api.log');

        this.rcloneBaseUrl = `http://unix:${this.rcloneSocketPath}:`;
        this.logger.log(
            `Ensuring RClone is stopped and socket is clean before initialization. Socket path: ${this.rcloneSocketPath}`
        );

        // Stop any existing rclone instances and remove the socket file.
        await this.stopRcloneSocket();

        this.logger.warn('Proceeding to start new RClone socket...');
        this.initialized = await this.startRcloneSocket(this.rcloneSocketPath, logFilePath);
    }

    private async startRcloneSocket(socketPath: string, logFilePath: string): Promise<boolean> {
        try {
            // Make sure the log file exists
            if (!existsSync(logFilePath)) {
                this.logger.debug(`Creating log file: ${logFilePath}`);
                await mkdir(dirname(logFilePath), { recursive: true });
                await writeFile(logFilePath, '', 'utf-8');
            }
            await this.ensureLogFileExists(logFilePath);

            const rcloneArgs = this.buildRcloneArgs(socketPath, logFilePath);
            this.logger.log(`Starting RClone RC daemon on socket: ${socketPath}`);
            // Start the process but don't wait for it to finish
            this.rcloneProcess = execa(
                'rclone',
                [
                    'rcd',
                    '--rc-addr',
                    socketPath,
                    '--log-level',
                    'INFO',
                    '--log-file',
                    logFilePath,
                    ...(this.rcloneUsername ? ['--rc-user', this.rcloneUsername] : []),
                    ...(this.rclonePassword ? ['--rc-pass', this.rclonePassword] : []),
                ],
                { detached: false } // Keep attached to manage lifecycle
            );

            // Handle potential errors during process spawning (e.g., command not found)
            this.rcloneProcess.on('error', (error: Error) => {
                this.logger.error(`RClone process failed to start: ${error.message}`);
                this.rcloneProcess = null; // Clear the handle on error
                this.isInitialized = false;
            });
            const rcloneProcessExecution = execa('rclone', rcloneArgs, { detached: false });
            this.rcloneProcess = rcloneProcessExecution;
            this.setupProcessListeners();

            // Handle unexpected exit
            this.rcloneProcess.on('exit', (code, signal) => {
                this.logger.warn(
                    `RClone process exited unexpectedly with code: ${code}, signal: ${signal}`
            rcloneProcessExecution.catch((error) => {
                this.logger.debug(
                    `Rclone process execution promise rejected (expected if process failed to start or exited prematurely): ${
                        error.shortMessage || error.message
                    }`
                );
                this.rcloneProcess = null;
                this.isInitialized = false;
            });

            // Wait for socket to be ready using p-retry with exponential backoff
            await pRetry(
                async () => {
                    const isRunning = await this.checkRcloneSocketRunning();
                    if (!isRunning) throw new Error('Rclone socket not ready');
                },
                {
                    retries: 6, // 7 attempts total
                    minTimeout: 100,
                    maxTimeout: 5000,
                    factor: 2,
                    maxRetryTime: 30000,
                }
            );

            await this.waitForSocketReady();
            this.logger.log('RClone RC daemon started and socket is ready.');
            return true;
        } catch (error: unknown) {
            this.logger.error(`Error starting RClone RC daemon: ${error}`);
            this.rcloneProcess?.kill(); // Attempt to kill if started but failed later
            this.rcloneProcess = null;
            this.logger.error(`Error during RClone RC daemon startup sequence: ${error}`);
            this.cleanupFailedProcess();
            return false;
        }
    }

    private async stopRcloneSocket(): Promise<void> {
        if (this.rcloneProcess && !this.rcloneProcess.killed) {
            this.logger.log(`Stopping RClone RC daemon process (PID: ${this.rcloneProcess.pid})...`);
            try {
                const killed = this.rcloneProcess.kill('SIGTERM'); // Send SIGTERM first
                if (!killed) {
                    this.logger.warn('Failed to kill RClone process with SIGTERM, trying SIGKILL.');
                    this.rcloneProcess.kill('SIGKILL'); // Force kill if SIGTERM failed
                }
                this.logger.log('RClone process stopped.');
            } catch (error: unknown) {
                this.logger.error(`Error stopping RClone process: ${error}`);
            } finally {
                this.rcloneProcess = null; // Clear the handle
            }
    private async ensureLogFileExists(logFilePath: string): Promise<void> {
        if (!existsSync(logFilePath)) {
            await mkdir(dirname(logFilePath), { recursive: true });
            await writeFile(logFilePath, '', 'utf-8');
        }
    }

    private buildRcloneArgs(socketPath: string, logFilePath: string): string[] {
        // Unix sockets don't require HTTP authentication - the socket itself provides security
        const isUnixSocket = socketPath.startsWith('/');

        if (isUnixSocket) {
            this.logger.log('Using Unix socket - HTTP authentication not required, using --rc-no-auth');
        } else {
            this.logger.log('RClone process not running or already stopped.');
            this.logger.log(
                `Building RClone args with username: ${this.rcloneUsername ? '[SET]' : '[NOT SET]'}, password: ${this.rclonePassword ? '[SET]' : '[NOT SET]'}`
            );
        }

        // Clean up the socket file if it exists
        const args = [
            'rcd',
            '--rc-addr',
            socketPath,
            '--log-level',
            'INFO',
            '--log-file',
            logFilePath,
            // For Unix sockets, use --rc-no-auth instead of credentials
            ...(isUnixSocket ? ['--rc-no-auth'] : []),
            // Only add authentication for non-Unix socket connections
            ...(!isUnixSocket && this.rcloneUsername ? ['--rc-user', this.rcloneUsername] : []),
            ...(!isUnixSocket && this.rclonePassword ? ['--rc-pass', this.rclonePassword] : []),
        ];

        this.logger.log(`RClone command args: ${args.join(' ')}`);
        return args;
    }

    private setupProcessListeners(): void {
        if (!this.rcloneProcess) return;

        this.rcloneProcess.on('error', (error: Error) => {
            this.logger.error(`RClone process failed to start: ${error.message}`);
            this.cleanupFailedProcess();
        });

        this.rcloneProcess.on('exit', (code, signal) => {
            this.logger.warn(`RClone process exited unexpectedly with code: ${code}, signal: ${signal}`);
            this.cleanupFailedProcess();
        });
    }

    private cleanupFailedProcess(): void {
        this.rcloneProcess = null;
        this.initialized = false;
    }

    private async waitForSocketReady(): Promise<void> {
        await pRetry(async () => {
            const isRunning = await this.checkRcloneSocketRunning();
            if (!isRunning) throw new Error('Rclone socket not ready');
        }, CONSTANTS.RETRY_CONFIG);
    }

    private async stopRcloneSocket(): Promise<void> {
        if (this.rcloneProcess && !this.rcloneProcess.killed) {
            await this.terminateProcess();
        }

        await this.killExistingRcloneProcesses();
        await this.removeSocketFile();
    }

    private async terminateProcess(): Promise<void> {
        if (!this.rcloneProcess) return;

        this.logger.log(`Stopping RClone RC daemon process (PID: ${this.rcloneProcess.pid})...`);

        try {
            const killed = this.rcloneProcess.kill('SIGTERM');
            if (!killed) {
                this.logger.warn('Failed to kill with SIGTERM, using SIGKILL');
                this.rcloneProcess.kill('SIGKILL');
            }
            this.logger.log('RClone process stopped');
        } catch (error: unknown) {
            this.logger.error(`Error stopping RClone process: ${error}`);
        } finally {
            this.rcloneProcess = null;
        }
    }

    private async removeSocketFile(): Promise<void> {
        if (this.rcloneSocketPath && existsSync(this.rcloneSocketPath)) {
            this.logger.log(`Removing RClone socket file: ${this.rcloneSocketPath}`);
            try {
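The retry settings in `CONSTANTS.RETRY_CONFIG` give seven attempts with exponential backoff: delays grow roughly 100 ms, 200 ms, 400 ms, 800 ms and so on, capped at 5 s per wait, with the whole loop abandoned after 30 s. A standalone sketch of the same wait pattern with p-retry, for reference (the probe callback is an assumed placeholder):

// Minimal sketch of the waitForSocketReady() pattern above, using p-retry's
// documented options; `probe` stands in for any readiness check.
import pRetry from 'p-retry';

async function waitUntilReady(probe: () => Promise<boolean>): Promise<void> {
    await pRetry(
        async () => {
            if (!(await probe())) throw new Error('not ready yet');
        },
        // 7 attempts total; backoff doubles from 100ms, capped at 5s per wait,
        // and the whole retry loop gives up after 30s
        { retries: 6, minTimeout: 100, maxTimeout: 5000, factor: 2, maxRetryTime: 30000 }
    );
}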
@@ -197,36 +293,19 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
        }
    }

    /**
     * Checks if the RClone socket exists
     */
    private async checkRcloneSocketExists(socketPath: string): Promise<boolean> {
        const socketExists = existsSync(socketPath);
        if (!socketExists) {
            this.logger.warn(`RClone socket does not exist at: ${socketPath}`);
            return false;
        }
        return true;
        return socketExists;
    }

    /**
     * Checks if the RClone socket is running
     */
    private async checkRcloneSocketRunning(): Promise<boolean> {
        try {
            // A simple API call to check if the daemon is responsive
            await this.callRcloneApi('core/pid');
            this.logger.debug('RClone socket is running and responsive.');
            return true;
        } catch (error: unknown) {
            // Silently handle socket connection errors during checks
            if (error instanceof Error) {
                if (error.message.includes('ENOENT') || error.message.includes('ECONNREFUSED')) {
                    this.logger.debug('RClone socket not accessible - daemon likely not running');
                } else {
                    this.logger.debug(`RClone socket check failed: ${error.message}`);
                }
            }
        } catch {
            return false;
        }
    }
||||
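`checkRcloneSocketRunning` ultimately issues a POST to rclone's documented `core/pid` endpoint, which returns the daemon's PID when it is responsive. A self-contained probe that mirrors that check using got's Unix-socket support (the socket path is an assumed example):

// Standalone health probe — the socket path is illustrative.
import got from 'got';

async function isRcloneUp(socketPath = '/var/run/rclone.sock'): Promise<boolean> {
    try {
        // rclone's RC API answers POST /core/pid with { pid: <number> }
        const { pid } = await got
            .post(`http://unix:${socketPath}:/core/pid`, {
                json: {},
                enableUnixSockets: true,
            })
            .json<{ pid: number }>();
        return Number.isInteger(pid);
    } catch {
        return false;
    }
}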
@@ -267,18 +346,11 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
        return response?.remotes || [];
    }

    /**
     * Get complete remote details
     */
    async getRemoteDetails(input: GetRCloneRemoteDetailsDto): Promise<RCloneRemoteConfig> {
        await validateObject(GetRCloneRemoteDetailsDto, input);
        const config = (await this.getRemoteConfig({ name: input.name })) || {};
        return config as RCloneRemoteConfig;
        return this.getRemoteConfig({ name: input.name });
    }

    /**
     * Get configuration of a remote
     */
    async getRemoteConfig(input: GetRCloneRemoteConfigDto): Promise<RCloneRemoteConfig> {
        await validateObject(GetRCloneRemoteConfigDto, input);
        return this.callRcloneApi('config/get', { name: input.name });
@@ -300,77 +372,329 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
        return result;
    }

    /**
     * Update an existing remote configuration
     */
    async updateRemote(input: UpdateRCloneRemoteDto): Promise<any> {
    async updateRemote(input: UpdateRCloneRemoteDto): Promise<unknown> {
        await validateObject(UpdateRCloneRemoteDto, input);
        this.logger.log(`Updating remote: ${input.name}`);
        const params = {

        return this.callRcloneApi('config/update', {
            name: input.name,
            ...input.parameters,
        };
        return this.callRcloneApi('config/update', params);
        });
    }

    /**
     * Delete a remote configuration
     */
    async deleteRemote(input: DeleteRCloneRemoteDto): Promise<any> {
    async deleteRemote(input: DeleteRCloneRemoteDto): Promise<unknown> {
        await validateObject(DeleteRCloneRemoteDto, input);
        this.logger.log(`Deleting remote: ${input.name}`);
        return this.callRcloneApi('config/delete', { name: input.name });
    }

    /**
     * Start a backup operation using sync/copy
     * This copies a directory from source to destination
     */
    async startBackup(input: RCloneStartBackupInput): Promise<any> {
    async startBackup(input: RCloneStartBackupInput): Promise<unknown> {
        await validateObject(RCloneStartBackupInput, input);
        this.logger.log(`Starting backup from ${input.srcPath} to ${input.dstPath}`);

        this.logger.log(`Starting backup: ${input.srcPath} → ${input.dstPath}`);

        const group = input.configId ? getConfigIdFromGroupId(input.configId) : 'manual';

        const params = {
            srcFs: input.srcPath,
            dstFs: input.dstPath,
            ...(input.async && { _async: input.async }),
            _group: group,
            ...(input.options || {}),
        };
        return this.callRcloneApi('sync/copy', params);

        const result = await this.callRcloneApi('sync/copy', params);
        const jobId = result.jobid || result.jobId || 'unknown';
        this.logger.log(`Backup job created with ID: ${jobId} in group: ${group}`);

        return result;
    }

    /**
     * Get the status of a running job
     * Gets enhanced job status with computed fields
     */
    async getJobStatus(input: GetRCloneJobStatusDto): Promise<any> {
        await validateObject(GetRCloneJobStatusDto, input);
        return this.callRcloneApi('job/status', { jobid: input.jobId });
    async getEnhancedJobStatus(jobId: string, configId?: string): Promise<RCloneJob | null> {
        try {
            await validateObject(GetRCloneJobStatusDto, { jobId });

            if (isBackupJobGroup(jobId)) {
                try {
                    const stats = await this.callRcloneApi('core/stats', { group: jobId });
                    const enhancedStats = this.statusService.enhanceStatsWithFormattedFields({
                        ...stats,
                        group: jobId,
                    });

                    const job = this.statusService.transformStatsToJob(jobId, enhancedStats);
                    job.configId = configId || getConfigIdFromGroupId(jobId);

                    // Add computed fields
                    job.isRunning = job.status === BackupJobStatus.RUNNING;
                    job.errorMessage = job.error || undefined;

                    return job;
                } catch (error) {
                    this.logger.warn(`Failed to get group stats for ${jobId}: ${error}`);
                }
            }

            // Fallback to individual job status
            const jobStatus = await this.getIndividualJobStatus(jobId);
            const enhancedStats = jobStatus.stats
                ? this.statusService.enhanceStatsWithFormattedFields(jobStatus.stats)
                : {};

            const job = this.statusService.transformStatsToJob(jobId, enhancedStats);

            // Add computed fields
            job.isRunning = job.status === BackupJobStatus.RUNNING;
            job.errorMessage = job.error || undefined;

            // Add configId if provided
            if (configId) {
                job.configId = configId;
            }

            return job;
        } catch (error) {
            this.logger.error(`Failed to fetch enhanced job status for ${jobId}: %o`, error);
            return null;
        }
    }

    /**
     * List all running jobs
     */
    async listRunningJobs(): Promise<any> {
    async getJobStatus(input: GetRCloneJobStatusDto): Promise<RCloneJob> {
        const enhancedJob = await this.getEnhancedJobStatus(input.jobId);
        if (enhancedJob) {
            return enhancedJob;
        }

        // Final fallback
        const jobStatus = await this.getIndividualJobStatus(input.jobId);
        return this.statusService.parseJobWithStats(input.jobId, jobStatus);
    }

    async getIndividualJobStatus(jobId: string): Promise<RCloneJobStatusResponse> {
        this.logger.debug(`Fetching status for job ${jobId}`);
        const result = await this.callRcloneApi('job/status', { jobid: jobId });

        if (result.error) {
            this.logger.warn(`Job ${jobId} has error: ${result.error}`);
        }

        return result;
    }

    async listRunningJobs(): Promise<RCloneJobListResponse> {
        this.logger.debug('Fetching job list from RClone API');
        return this.callRcloneApi('job/list');
    }

    /**
     * Generic method to call the RClone RC API
     */
    private async callRcloneApi(endpoint: string, params: Record<string, any> = {}): Promise<any> {
        const url = `${this.rcloneBaseUrl}/${endpoint}`;
    async getAllJobsWithStats(): Promise<RCloneJob[]> {
        try {
            this.logger.debug(
                `Calling RClone API: ${url} with params: ${JSON.stringify(sanitizeParams(params))}`
            // Get both the job list and group list
            const [runningJobs, groupList] = await Promise.all([
                this.listRunningJobs(),
                this.callRcloneApi('core/group-list'),
            ]);

            this.logger.debug(`Running jobs: ${JSON.stringify(runningJobs)}`);
            this.logger.debug(`Group list: ${JSON.stringify(groupList)}`);

            // Safety check: if too many groups, something is wrong
            if (groupList.groups && groupList.groups.length > 100) {
                this.logger.error(
                    `DANGER: Found ${groupList.groups.length} groups, aborting to prevent job explosion`
                );
                return [];
            }

            // Safety check: if too many individual jobs, something is wrong
            if (runningJobs.jobids && runningJobs.jobids.length > 1000) {
                this.logger.error(
                    `DANGER: Found ${runningJobs.jobids.length} individual jobs, aborting to prevent performance issues`
                );
                return [];
            }

            if (!runningJobs.jobids?.length) {
                this.logger.debug('No running jobs found');
                return [];
            }

            const backupGroups = (groupList.groups || []).filter((group: string) =>
                isBackupJobGroup(group)
            );

            const response = await got.post(url, {
                json: params,
                responseType: 'json',
                enableUnixSockets: true,
                headers: {
                    Authorization: `Basic ${Buffer.from(`${this.rcloneUsername}:${this.rclonePassword}`).toString('base64')}`,
                },
            if (backupGroups.length === 0) {
                this.logger.debug('No backup groups found');
                return [];
            }

            // Get group stats for all backup groups to get proper stats and group info
            const groupStatsMap = new Map<string, any>();
            await Promise.all(
                backupGroups.map(async (group: string) => {
                    try {
                        const stats = await this.callRcloneApi('core/stats', { group });
                        groupStatsMap.set(group, stats);
                    } catch (error) {
                        this.logger.warn(`Failed to get stats for group ${group}: ${error}`);
                    }
                })
            );

            const jobs: RCloneJob[] = [];

            // For each backup group, create a job entry with proper stats
            backupGroups.forEach((group) => {
                const groupStats = groupStatsMap.get(group);
                if (!groupStats) return;

                this.logger.debug(`Processing group ${group}: stats=${JSON.stringify(groupStats)}`);

                const extractedConfigId = getConfigIdFromGroupId(group);

                const enhancedStats = this.statusService.enhanceStatsWithFormattedFields({
                    ...groupStats,
                    group,
                });

                const job = this.statusService.transformStatsToJob(group, enhancedStats);
                job.configId = extractedConfigId;

                // Only include jobs that are truly active (not completed)
                const isActivelyTransferring = groupStats.transferring?.length > 0;
                const isActivelyChecking = groupStats.checking?.length > 0;
                const hasActiveSpeed = groupStats.speed > 0;
                const isNotFinished = !groupStats.finished && groupStats.fatalError !== true;

                if ((isActivelyTransferring || isActivelyChecking || hasActiveSpeed) && isNotFinished) {
                    jobs.push(job);
                }
            });

            this.logger.debug(
                `Found ${jobs.length} active backup jobs from ${backupGroups.length} groups`
            );
            return jobs;
        } catch (error) {
            this.logger.error('Failed to get jobs with stats:', error);
            return [];
        }
    }

    async stopAllJobs(): Promise<JobOperationResult> {
        const runningJobs = await this.listRunningJobs();

        if (!runningJobs.jobids?.length) {
            this.logger.log('No running jobs to stop');
            return { stopped: [], errors: [] };
        }

        this.logger.log(`Stopping ${runningJobs.jobids.length} running jobs`);
        return this.executeJobOperation(runningJobs.jobids, 'stop');
    }

    async stopJob(jobId: string): Promise<JobOperationResult> {
        this.logger.log(`Stopping job: ${jobId}`);

        if (isBackupJobGroup(jobId)) {
            // This is a group, use the stopgroup endpoint
            return this.executeGroupOperation([jobId], 'stopgroup');
        } else {
            // This is an individual job ID, use the regular stop endpoint
            return this.executeJobOperation([jobId], 'stop');
        }
    }

    private async executeGroupOperation(
        groupNames: string[],
        operation: 'stopgroup'
    ): Promise<JobOperationResult> {
        const stopped: string[] = [];
        const errors: string[] = [];

        const promises = groupNames.map(async (groupName) => {
            try {
                await this.callRcloneApi(`job/${operation}`, { group: groupName });
                stopped.push(groupName);
                this.logger.log(`${operation}ped group: ${groupName}`);
            } catch (error) {
                const errorMsg = `Failed to ${operation} group ${groupName}: ${error}`;
                errors.push(errorMsg);
                this.logger.error(errorMsg);
            }
        });

        await Promise.allSettled(promises);
        return { stopped, errors };
    }

    private async executeJobOperation(
        jobIds: (string | number)[],
        operation: 'stop'
    ): Promise<JobOperationResult> {
        const stopped: string[] = [];
        const errors: string[] = [];

        const promises = jobIds.map(async (jobId) => {
            try {
                await this.callRcloneApi(`job/${operation}`, { jobid: jobId });
                stopped.push(String(jobId));
                this.logger.log(`${operation}ped job: ${jobId}`);
            } catch (error) {
                const errorMsg = `Failed to ${operation} job ${jobId}: ${error}`;
                errors.push(errorMsg);
                this.logger.error(errorMsg);
            }
        });

        await Promise.allSettled(promises);
        return { stopped, errors };
    }

    async getBackupStatus(): Promise<BackupStatusResult> {
        const runningJobs = await this.listRunningJobs();

        if (!runningJobs.jobids?.length) {
            return this.statusService.parseBackupStatus(runningJobs, []);
        }

        const jobStatuses = await Promise.allSettled(
            runningJobs.jobids.map((jobId) => this.getIndividualJobStatus(String(jobId)))
        );

        return this.statusService.parseBackupStatus(runningJobs, jobStatuses);
    }

    private async callRcloneApi(endpoint: string, params: Record<string, unknown> = {}): Promise<any> {
        const url = `${this.rcloneBaseUrl}/${endpoint}`;

        // Unix sockets don't require HTTP authentication - the socket itself provides security
        const isUnixSocket = this.rcloneSocketPath && this.rcloneSocketPath.startsWith('/');

        const requestOptions: any = {
            json: params,
            responseType: 'json',
            enableUnixSockets: true,
        };

        // Only add authentication headers for non-Unix socket connections
        if (!isUnixSocket && this.rcloneUsername && this.rclonePassword) {
            const authString = `${this.rcloneUsername}:${this.rclonePassword}`;
            const authHeader = `Basic ${Buffer.from(authString).toString('base64')}`;
            requestOptions.headers = {
                Authorization: authHeader,
            };
            this.logger.debug(
                `Calling RClone API: ${endpoint} with auth header: ${authHeader.substring(0, 20)}...`
            );
        } else {
            this.logger.debug(`Calling RClone API: ${endpoint} via Unix socket (no auth required)`);
        }

        try {
            const response = await got.post(url, requestOptions);
            return response.body;
        } catch (error: unknown) {
            this.handleApiError(error, endpoint, params);
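Putting the job-control methods together, a caller can cancel a whole backup group and surface partial failures from the aggregated result. An illustrative sketch (how group ids are formatted is decided by isBackupJobGroup in backup.utils.ts, so the caller just passes through whatever id it was given):

// Illustrative orchestration built on the methods above.
async function cancelBackup(rclone: RCloneApiService, jobOrGroupId: string): Promise<void> {
    // stopJob() routes group ids to job/stopgroup and plain ids to job/stop
    const { stopped, errors } = await rclone.stopJob(jobOrGroupId);
    if (errors.length) {
        throw new Error(`Some jobs failed to stop: ${errors.join('; ')}`);
    }
    console.log(`Stopped: ${stopped.join(', ')}`);
}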
@@ -378,54 +702,108 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
    }

    private handleApiError(error: unknown, endpoint: string, params: Record<string, unknown>): never {
        const sanitizedParams = sanitizeParams(params);

        if (error instanceof HTTPError) {
            const statusCode = error.response.statusCode;
            const rcloneError = this.extractRcloneError(error.response.body, params);
            const detailedErrorMessage = `Rclone API Error (${endpoint}, HTTP ${statusCode}): ${rcloneError}`;
            const message = `Rclone API Error (${endpoint}, HTTP ${statusCode}): ${rcloneError}`;

            const sanitizedParams = sanitizeParams(params);
            this.logger.error(
                `Original ${detailedErrorMessage} | Params: ${JSON.stringify(sanitizedParams)}`,
                error.stack
            );

            throw new Error(detailedErrorMessage);
        } else if (error instanceof Error) {
            const detailedErrorMessage = `Error calling RClone API (${endpoint}) with params ${JSON.stringify(sanitizeParams(params))}: ${error.message}`;
            this.logger.error(detailedErrorMessage, error.stack);
            throw error;
        } else {
            const detailedErrorMessage = `Unknown error calling RClone API (${endpoint}) with params ${JSON.stringify(sanitizeParams(params))}: ${String(error)}`;
            this.logger.error(detailedErrorMessage);
            throw new Error(detailedErrorMessage);
            this.logger.error(`${message} | Params: ${JSON.stringify(sanitizedParams)}`, error.stack);
            throw new Error(message);
        }

        const message =
            error instanceof Error
                ? `Error calling RClone API (${endpoint}): ${error.message}`
                : `Unknown error calling RClone API (${endpoint}): ${String(error)}`;

        this.logger.error(
            `${message} | Params: ${JSON.stringify(sanitizedParams)}`,
            error instanceof Error ? error.stack : undefined
        );
        throw error instanceof Error ? error : new Error(message);
    }

    private extractRcloneError(responseBody: unknown, fallbackParams: Record<string, unknown>): string {
        try {
            let errorBody: unknown;
            if (typeof responseBody === 'string') {
                errorBody = JSON.parse(responseBody);
            } else if (typeof responseBody === 'object' && responseBody !== null) {
                errorBody = responseBody;
            }
            const errorBody = typeof responseBody === 'string' ? JSON.parse(responseBody) : responseBody;

            if (errorBody && typeof errorBody === 'object' && 'error' in errorBody) {
                const typedErrorBody = errorBody as { error: unknown; input?: unknown };
                let rcloneError = `Rclone Error: ${String(typedErrorBody.error)}`;
                if (typedErrorBody.input) {
                    rcloneError += ` | Input: ${JSON.stringify(typedErrorBody.input)}`;
                } else if (fallbackParams) {
                    rcloneError += ` | Original Params: ${JSON.stringify(fallbackParams)}`;
                const typedError = errorBody as { error: unknown; input?: unknown };
                let message = `Rclone Error: ${String(typedError.error)}`;

                if (typedError.input) {
                    message += ` | Input: ${JSON.stringify(typedError.input)}`;
                } else {
                    message += ` | Params: ${JSON.stringify(fallbackParams)}`;
                }
                return rcloneError;
            } else if (responseBody) {
                return `Non-standard error response body: ${typeof responseBody === 'string' ? responseBody : JSON.stringify(responseBody)}`;
            } else {
                return 'Empty error response body received.';

                return message;
            }
        } catch (parseOrAccessError) {
            return `Failed to process error response body. Raw body: ${typeof responseBody === 'string' ? responseBody : JSON.stringify(responseBody)}`;

            return responseBody
                ? `Non-standard error response: ${typeof responseBody === 'string' ? responseBody : JSON.stringify(responseBody)}`
                : 'Empty error response received';
        } catch {
            return `Failed to process error response: ${typeof responseBody === 'string' ? responseBody : JSON.stringify(responseBody)}`;
        }
    }

    private async killExistingRcloneProcesses(): Promise<void> {
        try {
            this.logger.log('Checking for existing rclone processes...');
            const { stdout } = await execa('pgrep', ['-f', 'rclone.*rcd'], { reject: false });

            if (!stdout.trim()) {
                this.logger.log('No existing rclone processes found');
                return;
            }

            const pids = stdout
                .trim()
                .split('\n')
                .filter((pid) => pid.trim());
            this.logger.log(`Found ${pids.length} existing rclone process(es): ${pids.join(', ')}`);

            await this.terminateProcesses(pids);
            await this.cleanupStaleSocket();
        } catch (error) {
            this.logger.warn(`Error during rclone process cleanup: ${error}`);
        }
    }

    private async terminateProcesses(pids: string[]): Promise<void> {
        for (const pid of pids) {
            try {
                this.logger.log(`Terminating rclone process PID: ${pid}`);

                await execa('kill', ['-TERM', pid], { reject: false });
                await new Promise((resolve) =>
                    setTimeout(resolve, CONSTANTS.TIMEOUTS.GRACEFUL_SHUTDOWN)
                );

                const { exitCode } = await execa('kill', ['-0', pid], { reject: false });

                if (exitCode === 0) {
                    this.logger.warn(`Process ${pid} still running, using SIGKILL`);
                    await execa('kill', ['-KILL', pid], { reject: false });
                    await new Promise((resolve) =>
                        setTimeout(resolve, CONSTANTS.TIMEOUTS.PROCESS_CLEANUP)
                    );
                }

                this.logger.log(`Successfully terminated process ${pid}`);
            } catch (error) {
                this.logger.warn(`Failed to kill process ${pid}: ${error}`);
            }
        }
    }

    private async cleanupStaleSocket(): Promise<void> {
        if (this.rcloneSocketPath && existsSync(this.rcloneSocketPath)) {
            await rm(this.rcloneSocketPath, { force: true });
            this.logger.log('Removed stale socket file');
        }
    }
}
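End to end, a manual backup run through this service looks roughly like the sketch below. The share path and remote name are assumptions, and the input is cast because `RCloneStartBackupInput`'s full shape is defined in rclone.model.ts; what is grounded in the code above is that sync/copy is fired with `_async` so the RC daemon returns a job id immediately, and the `_group` tag is what lets getEnhancedJobStatus() aggregate progress via core/stats:

// Illustrative call into RCloneApiService; paths and remote are assumed examples.
async function runManualBackup(rclone: RCloneApiService): Promise<void> {
    const result = await rclone.startBackup({
        srcPath: '/mnt/user/appdata', // assumed source share
        dstPath: 'google_drives:appdata', // assumed rclone remote:path
        async: true, // mapped to rclone's _async option above
        configId: undefined, // no config: the job lands in the 'manual' group
    } as any); // cast: full input type lives in rclone.model.ts
    console.log('rclone responded with', result);
}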
@@ -0,0 +1,505 @@
import { Test, TestingModule } from '@nestjs/testing';

import { beforeEach, describe, expect, it, vi } from 'vitest';

import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js';
import { RCloneJobStats } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';

// Mock NestJS Logger to suppress logs during tests
vi.mock('@nestjs/common', async (importOriginal) => {
    const original = await importOriginal<typeof import('@nestjs/common')>();
    return {
        ...original,
        Logger: vi.fn(() => ({
            log: vi.fn(),
            warn: vi.fn(),
            error: vi.fn(),
            debug: vi.fn(),
        })),
    };
});

describe('RCloneStatusService', () => {
    let service: RCloneStatusService;
    let mockFormatService: FormatService;

    beforeEach(() => {
        vi.clearAllMocks();

        mockFormatService = {
            formatBytes: vi.fn().mockImplementation((bytes: number) => `${bytes} B`),
            formatSpeed: vi.fn().mockImplementation((bytesPerSecond: number) => `${bytesPerSecond} B/s`),
            formatDuration: vi.fn().mockImplementation((seconds: number) => `${seconds}s`),
        } as any;

        service = new RCloneStatusService(mockFormatService);
    });

    describe('enhanceStatsWithFormattedFields', () => {
        it('should add formatted fields for all numeric stats', () => {
            const stats: RCloneJobStats = {
                bytes: 1024,
                speed: 512,
                elapsedTime: 60,
                eta: 120,
            };

            const result = service.enhanceStatsWithFormattedFields(stats);

            expect(result).toEqual({
                bytes: 1024,
                speed: 512,
                elapsedTime: 60,
                eta: 120,
                formattedBytes: '1024 B',
                formattedSpeed: '512 B/s',
                formattedElapsedTime: '60s',
                formattedEta: '120s',
                calculatedPercentage: 0,
                isActivelyRunning: true,
                isCompleted: false,
            });
            expect(mockFormatService.formatBytes).toHaveBeenCalledWith(1024);
            expect(mockFormatService.formatSpeed).toHaveBeenCalledWith(512);
            expect(mockFormatService.formatDuration).toHaveBeenCalledWith(60);
            expect(mockFormatService.formatDuration).toHaveBeenCalledWith(120);
        });

        it('should not add formatted fields for undefined values', () => {
            const stats: RCloneJobStats = {
                bytes: undefined,
                speed: undefined,
                elapsedTime: undefined,
                eta: undefined,
            };

            const result = service.enhanceStatsWithFormattedFields(stats);

            expect(result).toEqual({
                bytes: undefined,
                speed: undefined,
                elapsedTime: undefined,
                eta: undefined,
                calculatedPercentage: 0,
                formattedElapsedTime: '0s',
                formattedEta: 'Unknown',
                formattedSpeed: '0 B/s',
                isActivelyRunning: false,
                isCompleted: false,
            });
            expect(mockFormatService.formatBytes).not.toHaveBeenCalled();
            expect(mockFormatService.formatDuration).not.toHaveBeenCalled();
        });

        it('should not add formatted fields for null values', () => {
            const stats: RCloneJobStats = {
                bytes: null as any,
                speed: null as any,
                elapsedTime: null as any,
                eta: null as any,
            };

            const result = service.enhanceStatsWithFormattedFields(stats);

            expect(result).toEqual({
                bytes: null,
                speed: null,
                elapsedTime: null,
                eta: null,
                calculatedPercentage: 0,
                formattedElapsedTime: '0s',
                formattedEta: 'Unknown',
                formattedSpeed: '0 B/s',
                isActivelyRunning: false,
                isCompleted: false,
            });
            expect(mockFormatService.formatBytes).not.toHaveBeenCalled();
            expect(mockFormatService.formatDuration).not.toHaveBeenCalled();
        });

        it('should not add formatted speed for zero speed', () => {
            const stats: RCloneJobStats = {
                speed: 0,
            };

            const result = service.enhanceStatsWithFormattedFields(stats);

            expect(result).toEqual({
                speed: 0,
                calculatedPercentage: 0,
                formattedElapsedTime: '0s',
                formattedEta: 'Unknown',
                formattedSpeed: '0 B/s',
                isActivelyRunning: false,
                isCompleted: false,
            });
            expect(mockFormatService.formatSpeed).not.toHaveBeenCalled();
        });

        it('should not add formatted eta for zero eta', () => {
            const stats: RCloneJobStats = {
                eta: 0,
            };

            const result = service.enhanceStatsWithFormattedFields(stats);

            expect(result).toEqual({
                eta: 0,
                calculatedPercentage: 0,
                formattedElapsedTime: '0s',
                formattedEta: 'Unknown',
                formattedSpeed: '0 B/s',
                isActivelyRunning: false,
                isCompleted: false,
            });
            expect(mockFormatService.formatDuration).not.toHaveBeenCalled();
        });
    });

    describe('transformStatsToJob', () => {
        it('should create RCloneJob with completed status when transfers match total', () => {
            const stats: RCloneJobStats = {
                group: 'unraid-backup',
                fatalError: false,
                transfers: 5,
                totalTransfers: 5,
                errors: 0,
                percentage: 100,
            };

            const result = service.transformStatsToJob('123', stats);

            expect(result).toEqual({
                id: '123',
                group: 'unraid-backup',
                stats,
                finished: true,
                success: true,
                error: undefined,
                progressPercentage: 100,
                status: BackupJobStatus.COMPLETED,
                hasRecentJob: true,
            });
        });

        it('should create RCloneJob with running status when transfers incomplete', () => {
            const stats: RCloneJobStats = {
                group: 'unraid-backup',
                fatalError: false,
                transfers: 3,
                totalTransfers: 5,
                errors: 0,
                percentage: 60,
            };

            const result = service.transformStatsToJob('123', stats);

            expect(result).toEqual({
                id: '123',
                group: 'unraid-backup',
                stats,
                finished: false,
                success: true,
                error: undefined,
                progressPercentage: 60,
                status: BackupJobStatus.RUNNING,
                hasRecentJob: true,
            });
        });

        it('should create RCloneJob with error status when lastError exists', () => {
            const stats: RCloneJobStats = {
                group: 'unraid-backup',
                fatalError: false,
                transfers: 0,
                totalTransfers: 5,
                errors: 1,
                percentage: 0,
                lastError: 'Connection timeout',
            };

            const result = service.transformStatsToJob('123', stats);

            expect(result).toEqual({
                id: '123',
                group: 'unraid-backup',
                stats,
                finished: false,
                success: false,
                error: 'Connection timeout',
                progressPercentage: 0,
                status: BackupJobStatus.FAILED,
                hasRecentJob: true,
            });
        });

        it('should create RCloneJob with cancelled status when lastError is context canceled', () => {
            const stats: RCloneJobStats = {
                group: 'unraid-backup',
                fatalError: false,
                transfers: 0,
                totalTransfers: 5,
                errors: 1,
                percentage: 0,
                lastError: 'context canceled',
            };

            const result = service.transformStatsToJob('123', stats);

            expect(result).toEqual({
                id: '123',
                group: 'unraid-backup',
                stats,
                finished: false,
                success: false,
                error: 'context canceled',
                progressPercentage: 0,
                status: BackupJobStatus.CANCELLED,
                hasRecentJob: true,
            });
        });

        it('should handle numeric job ID', () => {
            const stats: RCloneJobStats = {
                fatalError: false,
                transfers: 0,
                totalTransfers: 0,
            };

            const result = service.transformStatsToJob(456, stats);

            expect(result.id).toBe('456');
        });

        it('should handle missing group', () => {
            const stats: RCloneJobStats = {
                fatalError: false,
                transfers: 0,
                totalTransfers: 0,
            };

            const result = service.transformStatsToJob('123', stats);

            expect(result.group).toBeUndefined();
        });
    });

    describe('calculateCombinedStats', () => {
        it('should combine stats from multiple jobs', () => {
            const mockActiveJobs = [
                {
                    stats: {
                        bytes: 1024,
                        checks: 2,
                        transfers: 3,
                        totalBytes: 2048,
                        totalChecks: 4,
                        totalTransfers: 6,
                        speed: 100,
                        eta: 120,
                    },
                },
                {
                    stats: {
                        bytes: 512,
                        checks: 1,
                        transfers: 2,
                        totalBytes: 1024,
                        totalChecks: 2,
                        totalTransfers: 4,
                        speed: 200,
                        eta: 60,
                    },
                },
            ];

            const result = service.calculateCombinedStats(mockActiveJobs);

            expect(result).toEqual({
                bytes: 1536,
                checks: 3,
                transfers: 5,
                totalBytes: 3072,
                totalChecks: 6,
                totalTransfers: 10,
                speed: 200, // Max speed
                eta: 120, // Max eta
            });
        });

        it('should return null for empty jobs array', () => {
            const result = service.calculateCombinedStats([]);
            expect(result).toBeNull();
        });

        it('should return null when no valid stats', () => {
            const mockActiveJobs = [{ stats: null as any }, { stats: undefined as any }];
            const result = service.calculateCombinedStats(mockActiveJobs);
            expect(result).toBeNull();
        });
    });

    describe('parseActiveJobs', () => {
        it('should return active jobs that are not finished', () => {
            const mockJobStatuses = [
                { status: 'fulfilled', value: { id: '1', finished: false } },
                { status: 'fulfilled', value: { id: '2', finished: true } },
                { status: 'rejected', reason: 'Error' },
            ] as PromiseSettledResult<any>[];

            const result = service.parseActiveJobs(mockJobStatuses);

            expect(result).toEqual([{ id: '1', finished: false }]);
        });

        it('should return empty array when all jobs are finished', () => {
            const mockJobStatuses = [
                { status: 'fulfilled', value: { id: '1', finished: true } },
            ] as PromiseSettledResult<any>[];

            const result = service.parseActiveJobs(mockJobStatuses);

            expect(result).toEqual([]);
        });
    });

    describe('parseBackupStatus', () => {
        it('should return running status when active jobs exist', () => {
            const mockRunningJobs = { jobids: ['123', '456'] };
            const mockJobStatuses = [
                { status: 'fulfilled', value: { id: '123', finished: false, stats: { bytes: 1024 } } },
                { status: 'fulfilled', value: { id: '456', finished: false, stats: { bytes: 512 } } },
            ] as PromiseSettledResult<any>[];

            const result = service.parseBackupStatus(mockRunningJobs, mockJobStatuses);

            expect(result).toEqual({
                isRunning: true,
                stats: expect.objectContaining({ bytes: 1536 }),
                jobCount: 2,
                activeJobs: expect.arrayContaining([
                    expect.objectContaining({ id: '123', finished: false }),
                    expect.objectContaining({ id: '456', finished: false }),
                ]),
            });
        });

        it('should return not running when no job IDs', () => {
            const mockRunningJobs = { jobids: [] };
            const mockJobStatuses = [] as PromiseSettledResult<any>[];

            const result = service.parseBackupStatus(mockRunningJobs, mockJobStatuses);

            expect(result).toEqual({
                isRunning: false,
                stats: null,
                jobCount: 0,
                activeJobs: [],
            });
        });
    });

    describe('parseJobWithStats', () => {
        it('should parse job with enhanced stats', () => {
            const mockJobStatus = {
                stats: { bytes: 1024, speed: 512 },
            };

            const result = service.parseJobWithStats('123', mockJobStatus);

            expect(result).toEqual(
                expect.objectContaining({
                    id: '123',
                    stats: expect.objectContaining({
                        bytes: 1024,
                        speed: 512,
                        formattedBytes: '1024 B',
                        formattedSpeed: '512 B/s',
                    }),
                })
            );
        });

        it('should handle missing stats', () => {
            const mockJobStatus = {};

            const result = service.parseJobWithStats('123', mockJobStatus);

            expect(result.id).toBe('123');
            expect(result.stats).toEqual({});
        });
    });

    describe('parseAllJobsWithStats', () => {
        it('should return jobs when job IDs exist', () => {
            const mockRunningJobs = { jobids: ['123', '456'] };
            const mockJobs = [
                { id: '123', group: 'unraid-backup' },
                { id: '456', group: 'unraid-backup' },
            ] as any[];

            const result = service.parseAllJobsWithStats(mockRunningJobs, mockJobs);

            expect(result).toEqual(mockJobs);
        });

        it('should return empty array when no job IDs', () => {
            const mockRunningJobs = { jobids: [] };
            const mockJobs = [] as any[];

            const result = service.parseAllJobsWithStats(mockRunningJobs, mockJobs);

            expect(result).toEqual([]);
        });
    });

    describe('parseJobsWithStats', () => {
        it('should parse fulfilled job statuses', () => {
            const mockJobStatuses = [
                { status: 'fulfilled', value: { id: '123', stats: { bytes: 1024 } } },
                { status: 'fulfilled', value: { id: '456', stats: { bytes: 512 } } },
                { status: 'rejected', reason: 'Error' },
            ] as PromiseSettledResult<any>[];

            const result = service.parseJobsWithStats(mockJobStatuses);

            expect(result).toHaveLength(2);
            expect(result[0]).toEqual(
                expect.objectContaining({
                    id: '123',
                    stats: expect.objectContaining({ bytes: 1024, formattedBytes: '1024 B' }),
                })
            );
            expect(result[1]).toEqual(
                expect.objectContaining({
                    id: '456',
                    stats: expect.objectContaining({ bytes: 512, formattedBytes: '512 B' }),
                })
            );
        });

        it('should handle rejected statuses gracefully', () => {
            const mockJobStatuses = [
                { status: 'rejected', reason: 'Error' },
            ] as PromiseSettledResult<any>[];

            const result = service.parseJobsWithStats(mockJobStatuses);

            expect(result).toEqual([]);
        });
    });

    describe('getBackupStatus', () => {
        it('should return default backup status', () => {
            const result = service.getBackupStatus();

            expect(result).toEqual({
                isRunning: false,
                stats: null,
                jobCount: 0,
            });
        });
    });
});
@@ -0,0 +1,268 @@
import { Injectable, Logger } from '@nestjs/common';

import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import {
    RCloneJob,
    RCloneJobListResponse,
    RCloneJobStats,
    RCloneJobWithStats,
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';

// Internal interface for job status response from RClone API
interface RCloneJobStatusResponse {
    id?: string | number;
    group?: string;
    stats?: RCloneJobStats;
    finished?: boolean;
    error?: string;
    [key: string]: any;
}

interface BackupStatusResult {
    isRunning: boolean;
    stats: RCloneJobStats | null;
    jobCount: number;
    activeJobs: RCloneJobStatusResponse[];
}

@Injectable()
export class RCloneStatusService {
    private readonly logger = new Logger(RCloneStatusService.name);

    constructor(private readonly formatService: FormatService) {}

    enhanceStatsWithFormattedFields(stats: RCloneJobStats): RCloneJobStats {
        const enhancedStats = { ...stats };

        const isFinished =
            stats.fatalError === false &&
            stats.transfers === (stats.totalTransfers || 0) &&
            (stats.totalTransfers || 0) > 0;

        // Format bytes
        if (stats.bytes !== undefined && stats.bytes !== null) {
            enhancedStats.formattedBytes = this.formatService.formatBytes(stats.bytes);
        }

        // Handle speed formatting and reset for finished jobs
        if (isFinished && stats.speed !== undefined && stats.speed !== null) {
            enhancedStats.speed = 0;
        }

        if (stats.speed !== undefined && stats.speed !== null && stats.speed > 0) {
            enhancedStats.formattedSpeed = this.formatService.formatSpeed(stats.speed);
        } else {
            enhancedStats.formattedSpeed = '0 B/s';
        }

        // Format elapsed time
        if (stats.elapsedTime !== undefined && stats.elapsedTime !== null) {
            enhancedStats.formattedElapsedTime = this.formatService.formatDuration(stats.elapsedTime);
        } else {
            enhancedStats.formattedElapsedTime = '0s';
        }

        // Format ETA
        if (stats.eta !== undefined && stats.eta !== null && stats.eta > 0) {
            enhancedStats.formattedEta = this.formatService.formatDuration(stats.eta);
        } else {
            enhancedStats.formattedEta = 'Unknown';
        }

        // Calculate percentage fallback (what frontend currently does)
        let calculatedPercentage = stats.percentage;
        if (calculatedPercentage === null || calculatedPercentage === undefined) {
            if (stats.bytes && stats.totalBytes && stats.totalBytes > 0) {
                calculatedPercentage = Math.round((stats.bytes / stats.totalBytes) * 100);
            }
        }

        // For completed jobs, ensure percentage is 100
        if (isFinished && calculatedPercentage !== null && calculatedPercentage !== undefined) {
            calculatedPercentage = 100;
        }

        enhancedStats.calculatedPercentage = Math.round(calculatedPercentage || 0);

        // Determine if actively running (what frontend currently calculates)
        const isActivelyTransferring =
            stats.transferring && Array.isArray(stats.transferring) && stats.transferring.length > 0;
        const isActivelyChecking =
            stats.checking && Array.isArray(stats.checking) && stats.checking.length > 0;
        const hasActiveSpeed = (stats.speed || 0) > 0;
        const isNotFinished = !isFinished && stats.fatalError !== true;

        enhancedStats.isActivelyRunning =
            (isActivelyTransferring || isActivelyChecking || hasActiveSpeed) && isNotFinished;
        enhancedStats.isCompleted = isFinished;

        return enhancedStats;
    }

    transformStatsToJob(jobId: string | number, stats: RCloneJobStats): RCloneJob {
        this.logger.debug(`Stats for job ${jobId}: %o`, stats);
        const group = stats.group || undefined;

        this.logger.debug(`Processing job ${jobId}: group="${group}"`);

        const isFinished =
            stats.fatalError === false &&
            stats.transfers === (stats.totalTransfers || 0) &&
            (stats.totalTransfers || 0) > 0;

        const hasError = Boolean(stats.lastError);
        const isCancelled = stats.lastError === 'context canceled';

        // Determine status
        let status: BackupJobStatus;

        if (hasError) {
            if (isCancelled) {
                status = BackupJobStatus.CANCELLED;
            } else {
                status = BackupJobStatus.FAILED;
            }
        } else if (isFinished || stats.calculatedPercentage === 100) {
            status = BackupJobStatus.COMPLETED;
        } else {
            status = BackupJobStatus.RUNNING;
        }

        return {
            id: String(jobId),
            group: group,
            stats,
            finished: isFinished,
            success: stats.fatalError === false && (stats.errors || 0) === 0,
            error: stats.lastError || undefined,
            progressPercentage: stats.calculatedPercentage || stats.percentage,
            status,
            hasRecentJob: true, // If we have a job object, there's a recent job
        };
    }

    calculateCombinedStats(activeJobs: RCloneJobStatusResponse[]): RCloneJobStats | null {
        if (activeJobs.length === 0) return null;

        const validStats = activeJobs
            .map((job) => job.stats)
            .filter((stats): stats is RCloneJobStats => Boolean(stats));

        if (validStats.length === 0) return null;

        return validStats.reduce(
            (combined, stats) => ({
                bytes: (combined.bytes || 0) + (stats.bytes || 0),
                checks: (combined.checks || 0) + (stats.checks || 0),
                transfers: (combined.transfers || 0) + (stats.transfers || 0),
                totalBytes: (combined.totalBytes || 0) + (stats.totalBytes || 0),
                totalChecks: (combined.totalChecks || 0) + (stats.totalChecks || 0),
                totalTransfers: (combined.totalTransfers || 0) + (stats.totalTransfers || 0),
                speed: Math.max(combined.speed || 0, stats.speed || 0),
                eta: Math.max(combined.eta || 0, stats.eta || 0),
            }),
            {} as RCloneJobStats
        );
    }

    parseActiveJobs(
        jobStatuses: PromiseSettledResult<RCloneJobStatusResponse>[]
    ): RCloneJobStatusResponse[] {
        const activeJobs: RCloneJobStatusResponse[] = [];

        this.logger.debug(`Job statuses: ${JSON.stringify(jobStatuses)}`);

        jobStatuses.forEach((result, index) => {
            if (result.status === 'fulfilled' && !result.value.finished) {
                activeJobs.push(result.value);
            } else if (result.status === 'rejected') {
                this.logger.warn(`Failed to get status for job ${index}: ${result.reason}`);
            }
        });

        return activeJobs;
    }

    parseBackupStatus(
        runningJobs: RCloneJobListResponse,
        jobStatuses: PromiseSettledResult<RCloneJobStatusResponse>[]
    ): BackupStatusResult {
        if (!runningJobs.jobids?.length) {
            return {
                isRunning: false,
                stats: null,
                jobCount: 0,
                activeJobs: [],
            };
        }

        const activeJobs = this.parseActiveJobs(jobStatuses);
        const combinedStats = this.calculateCombinedStats(activeJobs);

        return {
            isRunning: activeJobs.length > 0,
            stats: combinedStats,
            jobCount: activeJobs.length,
            activeJobs,
        };
    }

    parseJobWithStats(jobId: string, jobStatus: RCloneJobStatusResponse): RCloneJob {
        const stats = jobStatus.stats ? this.enhanceStatsWithFormattedFields(jobStatus.stats) : {};
        return this.transformStatsToJob(jobId, stats);
    }

    parseAllJobsWithStats(runningJobs: RCloneJobListResponse, jobs: RCloneJob[]): RCloneJob[] {
        if (!runningJobs.jobids?.length) {
            this.logger.log('No active jobs found in RClone');
            return [];
        }

        this.logger.log(
            `Found ${runningJobs.jobids.length} active jobs in RClone: [${runningJobs.jobids.join(', ')}]`
        );

        return jobs;
    }

    parseJobsWithStats(jobStatuses: PromiseSettledResult<RCloneJobStatusResponse>[]): RCloneJob[] {
        const allJobs: RCloneJob[] = [];

        jobStatuses.forEach((result, index) => {
            if (result.status === 'fulfilled') {
                const jobStatus = result.value;
                const stats = jobStatus.stats
                    ? this.enhanceStatsWithFormattedFields(jobStatus.stats)
                    : {};
                const job = this.transformStatsToJob(jobStatus.id || index, stats);
                allJobs.push(job);
            } else {
                this.logger.error(`Failed to get status for job ${index}: ${result.reason}`);
            }
        });

        return allJobs;
    }

    getBackupStatus(): {
        isRunning: boolean;
        stats: RCloneJobStats | null;
        jobCount: number;
    } {
        try {
            return {
                isRunning: false,
                stats: null,
                jobCount: 0,
            };
        } catch (error) {
            this.logger.debug(`Error getting backup status: ${error}`);
            return {
                isRunning: false,
                stats: null,
                jobCount: 0,
            };
        }
    }
}
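Note: a minimal usage sketch of the service above, not part of the diff. The surrounding wiring is assumed; in particular, `api.getJobStatus` is a hypothetical placeholder for whatever RClone API client actually fetches per-job status.

    // Combine live stats across running jobs: parseBackupStatus sums bytes/checks/transfers
    // across jobs and takes the max of speed and eta, per calculateCombinedStats above.
    const statuses = await Promise.allSettled(
        runningJobs.jobids.map((id) => api.getJobStatus(String(id))) // api.getJobStatus is hypothetical
    );
    const { isRunning, stats, jobCount } = statusService.parseBackupStatus(runningJobs, statuses);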
@@ -1,9 +1,11 @@
import { Field, ID, InputType, ObjectType } from '@nestjs/graphql';

import { type Layout } from '@jsonforms/core';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { IsBoolean, IsObject, IsOptional, IsString } from 'class-validator';
import { GraphQLJSON } from 'graphql-scalars';

import { BackupJobStatus } from '@app/unraid-api/graph/resolvers/backup/orchestration/backup-job-status.model.js';
import { DataSlice } from '@app/unraid-api/types/json-forms.js';

@ObjectType()
@@ -147,6 +149,19 @@ export class RCloneStartBackupInput {
    @IsString()
    dstPath!: string;

    @Field(() => Boolean, { nullable: true, defaultValue: false })
    @IsOptional()
    @IsBoolean()
    async?: boolean;

    @Field(() => String, {
        nullable: true,
        description: 'Configuration ID for job grouping and identification',
    })
    @IsOptional()
    @IsString()
    configId?: string;

    @Field(() => GraphQLJSON, { nullable: true })
    @IsOptional()
    @IsObject()
@@ -206,3 +221,189 @@ export class GetRCloneJobStatusDto {
    @IsString()
    jobId!: string;
}

@ObjectType()
export class RCloneJobStats {
    @Field(() => Number, { description: 'Bytes transferred', nullable: true })
    bytes?: number;

    @Field(() => Number, { description: 'Transfer speed in bytes/sec', nullable: true })
    speed?: number;

    @Field(() => Number, { description: 'Estimated time to completion in seconds', nullable: true })
    eta?: number;

    @Field(() => Number, { description: 'Elapsed time in seconds', nullable: true })
    elapsedTime?: number;

    @Field(() => Number, { description: 'Progress percentage (0-100)', nullable: true })
    percentage?: number;

    @Field(() => Number, { description: 'Number of checks completed', nullable: true })
    checks?: number;

    @Field(() => Number, { description: 'Number of deletes completed', nullable: true })
    deletes?: number;

    @Field(() => Number, { description: 'Number of errors encountered', nullable: true })
    errors?: number;

    @Field(() => Boolean, { description: 'Whether a fatal error occurred', nullable: true })
    fatalError?: boolean;

    @Field(() => String, { description: 'Last error message', nullable: true })
    lastError?: string;

    @Field(() => Number, { description: 'Number of renames completed', nullable: true })
    renames?: number;

    @Field(() => Boolean, { description: 'Whether there is a retry error', nullable: true })
    retryError?: boolean;

    @Field(() => Number, { description: 'Number of server-side copies', nullable: true })
    serverSideCopies?: number;

    @Field(() => Number, { description: 'Bytes in server-side copies', nullable: true })
    serverSideCopyBytes?: number;

    @Field(() => Number, { description: 'Number of server-side moves', nullable: true })
    serverSideMoves?: number;

    @Field(() => Number, { description: 'Bytes in server-side moves', nullable: true })
    serverSideMoveBytes?: number;

    @Field(() => Number, { description: 'Total bytes to transfer', nullable: true })
    totalBytes?: number;

    @Field(() => Number, { description: 'Total checks to perform', nullable: true })
    totalChecks?: number;

    @Field(() => Number, { description: 'Total transfers to perform', nullable: true })
    totalTransfers?: number;

    @Field(() => Number, { description: 'Time spent transferring in seconds', nullable: true })
    transferTime?: number;

    @Field(() => Number, { description: 'Number of transfers completed', nullable: true })
    transfers?: number;

    @Field(() => GraphQLJSON, { description: 'Currently transferring files', nullable: true })
    transferring?: any[];

    @Field(() => GraphQLJSON, { description: 'Currently checking files', nullable: true })
    checking?: any[];

    // Formatted fields
    @Field(() => String, { description: 'Human-readable bytes transferred', nullable: true })
    formattedBytes?: string;

    @Field(() => String, { description: 'Human-readable transfer speed', nullable: true })
    formattedSpeed?: string;

    @Field(() => String, { description: 'Human-readable elapsed time', nullable: true })
    formattedElapsedTime?: string;

    @Field(() => String, { description: 'Human-readable ETA', nullable: true })
    formattedEta?: string;

    // Computed fields that frontend currently calculates
    @Field(() => Number, {
        description: 'Calculated percentage (fallback when percentage is null)',
        nullable: true,
    })
    calculatedPercentage?: number;

    @Field(() => Boolean, { description: 'Whether the job is actively running', nullable: true })
    isActivelyRunning?: boolean;

    @Field(() => Boolean, { description: 'Whether the job is completed', nullable: true })
    isCompleted?: boolean;

    // Allow additional fields
    [key: string]: any;
}

@ObjectType()
export class RCloneJob {
    @Field(() => PrefixedID, { description: 'Job ID' })
    id!: string;

    @Field(() => String, { description: 'RClone group for the job', nullable: true })
    group?: string;

    @Field(() => RCloneJobStats, { description: 'Job status and statistics', nullable: true })
    stats?: RCloneJobStats;

    @Field(() => Number, { description: 'Progress percentage (0-100)', nullable: true })
    progressPercentage?: number;

    @Field(() => PrefixedID, { description: 'Configuration ID that triggered this job', nullable: true })
    configId?: string;

    @Field(() => BackupJobStatus, { description: 'Current status of the job', nullable: true })
    status?: BackupJobStatus;

    @Field(() => Boolean, { description: 'Whether the job is finished', nullable: true })
    finished?: boolean;

    @Field(() => Boolean, { description: 'Whether the job was successful', nullable: true })
    success?: boolean;

    @Field(() => String, { description: 'Error message if job failed', nullable: true })
    error?: string;

    // Computed fields that frontend currently calculates
    @Field(() => Boolean, { description: 'Whether the job is actively running', nullable: true })
    isRunning?: boolean;

    @Field(() => String, { description: 'Error message for display', nullable: true })
    errorMessage?: string;

    @Field(() => Boolean, { description: 'Whether there is a recent job', nullable: true })
    hasRecentJob?: boolean;
}

@ObjectType()
export class RCloneJobStatusDto {
    @Field(() => Number, { description: 'Job ID' })
    id!: number;

    @Field(() => String, { description: 'RClone group for the job' })
    group!: string;

    @Field(() => Boolean, { description: 'Whether the job is finished' })
    finished!: boolean;

    @Field(() => Boolean, { description: 'Whether the job was successful' })
    success!: boolean;

    @Field(() => String, { description: 'Error message if any' })
    error!: string;

    @Field(() => Number, { description: 'Job duration in seconds' })
    duration!: number;

    @Field(() => String, { description: 'Job start time in ISO format' })
    startTime!: string;

    @Field(() => String, { description: 'Job end time in ISO format' })
    endTime!: string;

    @Field(() => GraphQLJSON, { description: 'Job output data', nullable: true })
    output?: Record<string, any>;
}

// API Response Types (for internal use)
export interface RCloneJobListResponse {
    jobids: (string | number)[];
}

export interface RCloneJobWithStats {
    jobId: string | number;
    stats: RCloneJobStats;
}

export interface RCloneJobsWithStatsResponse {
    jobids: (string | number)[];
    stats: RCloneJobStats[];
}
@@ -1,20 +1,24 @@
import { Module } from '@nestjs/common';
import { forwardRef, Module } from '@nestjs/common';

import { BackupSourceModule } from '@app/unraid-api/graph/resolvers/backup/source/backup-source.module.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
import { RCloneFormService } from '@app/unraid-api/graph/resolvers/rclone/rclone-form.service.js';
import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js';
import { RCloneMutationsResolver } from '@app/unraid-api/graph/resolvers/rclone/rclone.mutation.resolver.js';
import { RCloneBackupSettingsResolver } from '@app/unraid-api/graph/resolvers/rclone/rclone.resolver.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';
import { UtilsModule } from '@app/unraid-api/utils/utils.module.js';

@Module({
    imports: [],
    imports: [UtilsModule, forwardRef(() => BackupSourceModule)],
    providers: [
        RCloneService,
        RCloneApiService,
        RCloneStatusService,
        RCloneFormService,
        RCloneBackupSettingsResolver,
        RCloneMutationsResolver,
    ],
    exports: [RCloneService, RCloneApiService],
    exports: [RCloneService, RCloneApiService, RCloneStatusService],
})
export class RCloneModule {}
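Note: the `forwardRef(() => BackupSourceModule)` above defers module resolution, which is how NestJS breaks circular imports between two modules that reference each other. A hedged sketch of the other side, assuming BackupSourceModule imports RCloneModule back (the real module's contents are not shown in this diff):

    @Module({
        imports: [forwardRef(() => RCloneModule)], // mirror reference; illustrative only
    })
    export class BackupSourceModule {}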
@@ -1,7 +1,7 @@
import { Logger } from '@nestjs/common';
import { Args, ResolveField, Resolver } from '@nestjs/graphql';

import { Resource } from '@unraid/shared/graphql.model.js';
import { Resource } from '@unraid/shared/graphql.model';
import {
    AuthActionVerb,
    AuthPossession,
@@ -14,6 +14,7 @@ import {
    CreateRCloneRemoteInput,
    DeleteRCloneRemoteInput,
    RCloneRemote,
    RCloneRemoteConfig,
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';

/**
@@ -38,7 +39,7 @@ export class RCloneMutationsResolver {
                name: input.name,
                type: input.type,
                parameters: {},
                config,
                config: config as RCloneRemoteConfig,
            };
        } catch (error) {
            this.logger.error(`Error creating remote: ${error}`);
@@ -5,13 +5,13 @@ import { type Layout } from '@jsonforms/core';
import type { SettingSlice } from '@app/unraid-api/types/json-forms.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
import { RCloneFormService } from '@app/unraid-api/graph/resolvers/rclone/rclone-form.service.js';
import { RCloneRemote } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { RCloneJob, RCloneRemote } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';

/**
 * Types for rclone backup configuration UI
 */
export interface RcloneBackupConfigValues {
    configStep: number;
    configStep: { current: number; total: number };
    showAdvanced: boolean;
    name?: string;
    type?: string;
@@ -48,7 +48,7 @@ export class RCloneService {
     */
    async onModuleInit(): Promise<void> {
        try {
            if (!this.rcloneApiService.initialized) {
            if (!this.rcloneApiService.isInitialized) {
                this.logger.warn(
                    'RClone API service is not initialized, skipping provider info loading'
                );
@@ -83,7 +83,7 @@ export class RCloneService {
     */
    async getCurrentSettings(): Promise<RcloneBackupConfigValues> {
        return {
            configStep: 0,
            configStep: { current: 0, total: 0 },
            showAdvanced: false,
        };
    }
@@ -125,4 +125,11 @@ export class RCloneService {
            return [];
        }
    }

    /**
     * Gets enhanced job status with computed fields
     */
    async getEnhancedJobStatus(jobId: string, configId?: string): Promise<RCloneJob | null> {
        return this.rcloneApiService.getEnhancedJobStatus(jobId, configId);
    }
}
@@ -2,15 +2,14 @@ import { Module } from '@nestjs/common';

import { AuthModule } from '@app/unraid-api/auth/auth.module.js';
import { ApiKeyModule } from '@app/unraid-api/graph/resolvers/api-key/api-key.module.js';
import { ApiKeyResolver } from '@app/unraid-api/graph/resolvers/api-key/api-key.resolver.js';
import { ArrayModule } from '@app/unraid-api/graph/resolvers/array/array.module.js';
import { BackupModule } from '@app/unraid-api/graph/resolvers/backup/backup.module.js';
import { ConfigResolver } from '@app/unraid-api/graph/resolvers/config/config.resolver.js';
import { CustomizationModule } from '@app/unraid-api/graph/resolvers/customization/customization.module.js';
import { DisksModule } from '@app/unraid-api/graph/resolvers/disks/disks.module.js';
import { DisplayResolver } from '@app/unraid-api/graph/resolvers/display/display.resolver.js';
import { DisplayService } from '@app/unraid-api/graph/resolvers/display/display.service.js';
import { DockerModule } from '@app/unraid-api/graph/resolvers/docker/docker.module.js';
import { FlashBackupModule } from '@app/unraid-api/graph/resolvers/flash-backup/flash-backup.module.js';
import { FlashResolver } from '@app/unraid-api/graph/resolvers/flash/flash.resolver.js';
import { DevicesResolver } from '@app/unraid-api/graph/resolvers/info/devices.resolver.js';
import { DevicesService } from '@app/unraid-api/graph/resolvers/info/devices.service.js';
@@ -34,16 +33,18 @@ import { VmsService } from '@app/unraid-api/graph/resolvers/vms/vms.service.js';
import { ServicesResolver } from '@app/unraid-api/graph/services/services.resolver.js';
import { SharesResolver } from '@app/unraid-api/graph/shares/shares.resolver.js';
import { MeResolver } from '@app/unraid-api/graph/user/user.resolver.js';
import { UtilsModule } from '@app/unraid-api/utils/utils.module.js';

@Module({
    imports: [
        UtilsModule,
        ArrayModule,
        ApiKeyModule,
        AuthModule,
        BackupModule,
        CustomizationModule,
        DockerModule,
        DisksModule,
        FlashBackupModule,
        RCloneModule,
        SettingsModule,
    ],
10
api/src/unraid-api/graph/utils/utils.module.ts
Normal file
@@ -0,0 +1,10 @@
import { Global, Module } from '@nestjs/common';

import { FormatService } from '@app/unraid-api/utils/format.service.js';

@Global()
@Module({
    providers: [FormatService],
    exports: [FormatService],
})
export class UtilsModule {}
@@ -18,7 +18,8 @@ export async function bootstrapNestServer(): Promise<NestFastifyApplication> {

    const app = await NestFactory.create<NestFastifyApplication>(AppModule, new FastifyAdapter(), {
        bufferLogs: false,
        ...(LOG_LEVEL !== 'TRACE' ? { logger: false } : {}),
        ...(LOG_LEVEL !== 'DEBUG' ? { logger: false } : {}),
    });

    // Enable validation globally
@@ -1 +1 @@
1751630630443
1752326314433
@@ -1 +1 @@
1751630630198
1752326314052
@@ -1 +1 @@
1751630630343
1752326314199
@@ -1 +1 @@
1751630630571
1752326314557
@@ -1 +1 @@
1751630630810
1752326314785
59
api/src/unraid-api/utils/format.service.test.ts
Normal file
@@ -0,0 +1,59 @@
import { describe, expect, it } from 'vitest';

import { FormatService } from '@app/unraid-api/utils/format.service.js';

describe('FormatService', () => {
    const service = new FormatService();

    describe('formatBytes', () => {
        it('should format zero bytes', () => {
            expect(service.formatBytes(0)).toBe('0 B');
        });

        it('should format bytes to best unit', () => {
            expect(service.formatBytes(1024)).toBe('1.02 KB');
            expect(service.formatBytes(1048576)).toBe('1.05 MB');
            expect(service.formatBytes(1073741824)).toBe('1.07 GB');
        });

        it('should format with decimals when needed', () => {
            expect(service.formatBytes(1536)).toBe('1.54 KB');
            expect(service.formatBytes(9636529)).toBe('9.64 MB');
        });
    });

    describe('formatSpeed', () => {
        it('should format zero speed', () => {
            expect(service.formatSpeed(0)).toBe('0 B/s');
        });

        it('should format speed with /s suffix', () => {
            expect(service.formatSpeed(1024)).toBe('1.02 KB/s');
            expect(service.formatSpeed(1048576)).toBe('1.05 MB/s');
            expect(service.formatSpeed(1073741824)).toBe('1.07 GB/s');
        });

        it('should format with decimals when needed', () => {
            expect(service.formatSpeed(1536)).toBe('1.54 KB/s');
            expect(service.formatSpeed(9636529.183648435)).toBe('9.64 MB/s');
        });
    });

    describe('formatDuration', () => {
        it('should format small durations in seconds', () => {
            expect(service.formatDuration(30)).toBe('30s');
            expect(service.formatDuration(45.5)).toBe('45.5s');
        });

        it('should format longer durations to best unit', () => {
            expect(service.formatDuration(60)).toBe('60 s');
            expect(service.formatDuration(3600)).toBe('60 min');
            expect(service.formatDuration(86400)).toBe('24 h');
        });

        it('should format with decimals when needed', () => {
            expect(service.formatDuration(90)).toBe('1.5 min');
            expect(service.formatDuration(11.615060290966666 * 60)).toBe('11.62 min');
        });
    });
});
33
api/src/unraid-api/utils/format.service.ts
Normal file
@@ -0,0 +1,33 @@
import { Injectable } from '@nestjs/common';

import { convert } from 'convert';

@Injectable()
export class FormatService {
    formatBytes(bytes: number): string {
        if (bytes === 0) return '0 B';

        const result = convert(bytes, 'bytes').to('best');
        const value =
            typeof result.quantity === 'number' ? Number(result.quantity.toFixed(2)) : result.quantity;
        return `${value} ${result.unit}`;
    }

    formatSpeed(bytesPerSecond: number): string {
        if (bytesPerSecond === 0) return '0 B/s';

        const result = convert(bytesPerSecond, 'bytes').to('best');
        const value =
            typeof result.quantity === 'number' ? Number(result.quantity.toFixed(2)) : result.quantity;
        return `${value} ${result.unit}/s`;
    }

    formatDuration(seconds: number): string {
        if (seconds < 60) return `${Math.round(seconds * 100) / 100}s`;

        const result = convert(seconds, 'seconds').to('best');
        const value =
            typeof result.quantity === 'number' ? Number(result.quantity.toFixed(2)) : result.quantity;
        return `${value} ${result.unit}`;
    }
}
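Note: an illustrative sketch of the formatting behavior, with expected values taken from the test file above (`convert` picks the "best" unit and the quantity is rounded to two decimals):

    const fmt = new FormatService();
    fmt.formatBytes(0);        // '0 B'
    fmt.formatBytes(9636529);  // '9.64 MB'
    fmt.formatSpeed(1536);     // '1.54 KB/s'
    fmt.formatDuration(90);    // '1.5 min'  (sub-minute values short-circuit to '<n>s')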
10
api/src/unraid-api/utils/utils.module.ts
Normal file
@@ -0,0 +1,10 @@
import { Global, Module } from '@nestjs/common';

import { FormatService } from '@app/unraid-api/utils/format.service.js';

@Global()
@Module({
    providers: [FormatService],
    exports: [FormatService],
})
export class UtilsModule {}
@@ -7,7 +7,7 @@
    "build:watch": " pnpm -r --parallel build:watch",
    "dev": "pnpm -r dev",
    "unraid:deploy": "pnpm -r unraid:deploy",
    "test": "pnpm -r test",
    "test": "vitest",
    "lint": "pnpm -r lint",
    "lint:fix": "pnpm -r lint:fix",
    "type-check": "pnpm -r type-check",
@@ -43,7 +43,8 @@
    "@manypkg/cli": "0.24.0",
    "chalk": "5.4.1",
    "diff": "8.0.2",
    "ignore": "7.0.5"
    "ignore": "7.0.5",
    "vitest": "3.2.4"
  },
  "devDependencies": {
    "lint-staged": "16.1.2",
@@ -54,7 +55,7 @@
  },
  "lint-staged": {
    "*.{js,jsx,ts,tsx,vue}": [
      "pnpm lint:fix"
      "npx pnpm lint:fix"
    ]
  },
  "packageManager": "pnpm@10.13.1"
@@ -3,11 +3,11 @@ import { EventEmitter2 } from '@nestjs/event-emitter';
import { Args, Mutation, Query, ResolveField, Resolver } from '@nestjs/graphql';

import { type Layout } from '@jsonforms/core';
import { GraphQLJSON } from 'graphql-scalars';
import { Resource } from '@unraid/shared/graphql.model.js';
import { DataSlice } from '@unraid/shared/jsonforms/settings.js';
import { PrefixedID } from '@unraid/shared/prefixed-id-scalar.js';
import { UsePermissions } from '@unraid/shared/use-permissions.directive.js';
import { GraphQLJSON } from 'graphql-scalars';
import { AuthActionVerb, AuthPossession } from 'nest-authz';

import { EVENTS } from '../helper/nest-tokens.js';
@@ -10,6 +10,7 @@ export enum Resource {
    ACTIVATION_CODE = 'ACTIVATION_CODE',
    API_KEY = 'API_KEY',
    ARRAY = 'ARRAY',
    BACKUP = 'BACKUP',
    CLOUD = 'CLOUD',
    CONFIG = 'CONFIG',
    CONNECT = 'CONNECT',
@@ -10,6 +10,7 @@ import { cleanupTxzFiles } from "./utils/cleanup";
import { apiDir } from "./utils/paths";
import { getVendorBundleName, getVendorFullPath } from "./build-vendor-store";
import { getAssetUrl } from "./utils/bucket-urls";
import { ensureRclone } from "./utils/rclone-helper";


// Recursively search for manifest files
48
pnpm-lock.yaml
generated
@@ -20,6 +20,9 @@ importers:
      ignore:
        specifier: 7.0.5
        version: 7.0.5
      vitest:
        specifier: 3.2.4
        version: 3.2.4(@types/node@22.16.3)(@vitest/ui@3.2.4)(happy-dom@18.0.1)(jiti@2.4.2)(jsdom@26.1.0)(stylus@0.57.0)(terser@5.43.1)(tsx@4.20.3)(yaml@2.8.0)
    devDependencies:
      lint-staged:
        specifier: 16.1.2
@@ -157,8 +160,8 @@ importers:
        specifier: 1.0.2
        version: 1.0.2
      cron:
        specifier: 4.3.1
        version: 4.3.1
        specifier: 4.3.0
        version: 4.3.0
      cross-fetch:
        specifier: 4.1.0
        version: 4.1.0
@@ -817,6 +820,9 @@ importers:
      '@vueuse/core':
        specifier: 13.5.0
        version: 13.5.0(vue@3.5.17(typescript@5.8.3))
      ajv:
        specifier: ^8.17.1
        version: 8.17.1
      class-variance-authority:
        specifier: 0.7.1
        version: 0.7.1
@@ -848,6 +854,9 @@ importers:
        specifier: 1.3.2
        version: 1.3.2
    devDependencies:
      '@eslint/js':
        specifier: 9.30.1
        version: 9.30.1
      '@ianvs/prettier-plugin-sort-imports':
        specifier: 4.5.1
        version: 4.5.1(@vue/compiler-sfc@3.5.17)(prettier@3.6.2)
@@ -980,6 +989,9 @@ importers:
      vue:
        specifier: 3.5.17
        version: 3.5.17(typescript@5.8.3)
      vue-eslint-parser:
        specifier: ^10.2.0
        version: 10.2.0(eslint@9.30.1(jiti@2.4.2))
      vue-tsc:
        specifier: 3.0.1
        version: 3.0.1(typescript@5.8.3)
@@ -6509,10 +6521,6 @@ packages:
    resolution: {integrity: sha512-ciiYNLfSlF9MrDqnbMdRWFiA6oizSF7kA1osPP9lRzNu0Uu+AWog1UKy7SkckiDY2irrNjeO6qLyKnXC8oxmrw==}
    engines: {node: '>=18.x'}

  cron@4.3.1:
    resolution: {integrity: sha512-7x7DoEOxV11t3OPWWMjj1xrL1PGkTV5RV+/54IJTZD7gStiaMploY43EkeBSkDZTLRbUwk+OISbQ0TR133oXyA==}
    engines: {node: '>=18.x'}

  croner@4.1.97:
    resolution: {integrity: sha512-/f6gpQuxDaqXu+1kwQYSckUglPaOrHdbIlBAu0YuW8/Cdb45XwXYNUBXg3r/9Mo6n540Kn/smKcZWko5x99KrQ==}

@@ -12984,12 +12992,6 @@ packages:
    peerDependencies:
      vue: '>=2'

  vue-eslint-parser@10.1.3:
    resolution: {integrity: sha512-dbCBnd2e02dYWsXoqX5yKUZlOt+ExIpq7hmHKPb5ZqKcjf++Eo0hMseFTZMLKThrUk61m+Uv6A2YSBve6ZvuDQ==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
    peerDependencies:
      eslint: ^8.57.0 || ^9.0.0

  vue-eslint-parser@10.2.0:
    resolution: {integrity: sha512-CydUvFOQKD928UzZhTp4pr2vWz1L+H99t7Pkln2QSPdvmURT0MoC4wUccfCnuEaihNsu9aYYyk+bep8rlfkUXw==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -19434,11 +19436,6 @@ snapshots:
      '@types/luxon': 3.6.2
      luxon: 3.6.1

  cron@4.3.1:
    dependencies:
      '@types/luxon': 3.6.2
      luxon: 3.6.1

  croner@4.1.97: {}

  croner@9.1.0: {}
@@ -20502,7 +20499,7 @@ snapshots:

  eslint-plugin-import-x@4.15.2(@typescript-eslint/utils@8.36.0(eslint@9.30.1(jiti@2.4.2))(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint@9.30.1(jiti@2.4.2)):
    dependencies:
      '@typescript-eslint/types': 8.34.1
      '@typescript-eslint/types': 8.36.0
      comment-parser: 1.4.1
      debug: 4.4.1(supports-color@5.5.0)
      eslint: 9.30.1(jiti@2.4.2)
@@ -26861,19 +26858,6 @@ snapshots:
      vue: 3.5.17(typescript@5.8.3)
      vue-inbrowser-compiler-independent-utils: 4.71.1(vue@3.5.17(typescript@5.8.3))

  vue-eslint-parser@10.1.3(eslint@9.30.1(jiti@2.4.2)):
    dependencies:
      debug: 4.4.1(supports-color@5.5.0)
      eslint: 9.30.1(jiti@2.4.2)
      eslint-scope: 8.4.0
      eslint-visitor-keys: 4.2.1
      espree: 10.4.0
      esquery: 1.6.0
      lodash: 4.17.21
      semver: 7.7.2
    transitivePeerDependencies:
      - supports-color

  vue-eslint-parser@10.2.0(eslint@9.30.1(jiti@2.4.2)):
    dependencies:
      debug: 4.4.1(supports-color@5.5.0)
@@ -26918,7 +26902,7 @@ snapshots:
      postcss-styl: 0.12.3
      recast-x: 1.0.5
      table: 6.9.0
      vue-eslint-parser: 10.1.3(eslint@9.30.1(jiti@2.4.2))
      vue-eslint-parser: 10.2.0(eslint@9.30.1(jiti@2.4.2))
    transitivePeerDependencies:
      - eslint
      - supports-color
@@ -1,15 +1,14 @@
// For more info, see https://github.com/storybookjs/eslint-plugin-storybook#configuration-flat-config-format
import storybook from "eslint-plugin-storybook";

import eslint from '@eslint/js';
// @ts-expect-error No Declaration For This Plugin
import importPlugin from 'eslint-plugin-import';
import noRelativeImportPaths from 'eslint-plugin-no-relative-import-paths';
import prettier from 'eslint-plugin-prettier';
import vuePlugin from 'eslint-plugin-vue';
import tseslint from 'typescript-eslint';
// Import vue-eslint-parser as an ESM import
import vueEslintParser from 'vue-eslint-parser';
import storybook from 'eslint-plugin-storybook';
// Import vue-eslint-parser as an ESM import

// Common rules shared across file types
const commonRules = {
@@ -55,6 +55,7 @@
    "@jsonforms/vue": "3.6.0",
    "@jsonforms/vue-vanilla": "3.6.0",
    "@vueuse/core": "13.5.0",
    "ajv": "^8.17.1",
    "class-variance-authority": "0.7.1",
    "clsx": "2.1.1",
    "dompurify": "3.2.6",
@@ -67,6 +68,7 @@
    "vue-sonner": "1.3.2"
  },
  "devDependencies": {
    "@eslint/js": "9.30.1",
    "@ianvs/prettier-plugin-sort-imports": "4.5.1",
    "@storybook/addon-docs": "9.0.16",
    "@storybook/addon-links": "9.0.16",
@@ -111,6 +113,7 @@
    "vite-plugin-vue-devtools": "7.7.7",
    "vitest": "3.2.4",
    "vue": "3.5.17",
    "vue-eslint-parser": "^10.2.0",
    "vue-tsc": "3.0.1",
    "wrangler": "^3.114.10"
  },
@@ -6,8 +6,8 @@ export * from '@/components/common/dropdown-menu';
export * from '@/components/common/loading';
export * from '@/components/form/input';
export * from '@/components/form/label';
export * from '@/components/form/number';
export * from '@/components/form/lightswitch';
export * from '@/components/form/number';
export * from '@/components/form/select';
export * from '@/components/form/switch';
export * from '@/components/common/scroll-area';
@@ -1,7 +1,4 @@
<script setup lang="ts">
/**
 * @todo complete this component
 */
import { Switch, SwitchGroup, SwitchLabel } from '@headlessui/vue';
import { ref } from 'vue';

61
unraid-ui/src/forms/JsonForms.vue
Normal file
@@ -0,0 +1,61 @@
<script lang="ts" setup>
import { jsonFormsRenderers } from '@/forms/renderers';
import type {
  JsonFormsCellRendererRegistryEntry,
  JsonFormsI18nState,
  JsonFormsRendererRegistryEntry,
  JsonFormsUISchemaRegistryEntry,
  JsonSchema,
  Middleware,
  UISchemaElement,
  ValidationMode,
} from '@jsonforms/core';
import { JsonForms as BaseJsonForms } from '@jsonforms/vue';
import Ajv from 'ajv';
import type { Ref } from 'vue';

const props = withDefaults(
  defineProps<{
    schema: JsonSchema;
    uischema?: UISchemaElement;
    data: Ref<Record<string, unknown>> | Record<string, unknown>;
    renderers?: JsonFormsRendererRegistryEntry[];
    cells?: JsonFormsCellRendererRegistryEntry[];
    config?: unknown;
    readonly?: boolean;
    uischemas?: JsonFormsUISchemaRegistryEntry[];
    validationMode?: ValidationMode;
    middleware?: Middleware;
    i18n?: JsonFormsI18nState;
  }>(),
  {
    renderers: () => jsonFormsRenderers,
    config: () => ({ restrict: false, trim: false, useDefaults: true }),
    validationMode: 'ValidateAndShow',
  }
);
const emit = defineEmits(['change']);
const ajv = new Ajv({ allErrors: true, useDefaults: true, strict: false });
function onChange(event: unknown): void {
  emit('change', event);
}
</script>

<template>
  <BaseJsonForms
    :schema="props.schema"
    :uischema="props.uischema"
    :data="props.data"
    :renderers="props.renderers"
    :cells="props.cells"
    :config="props.config"
    :readonly="props.readonly"
    :uischemas="props.uischemas"
    :validation-mode="props.validationMode"
    :ajv="ajv"
    :middleware="props.middleware"
    :i18n="props.i18n"
    :additional-errors="undefined"
    @change="onChange"
  />
</template>
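Note: a minimal consumer sketch for the wrapper above, not taken from the diff; the schema object is illustrative only. The wrapper pre-wires the shared renderers and a lenient Ajv instance, so a caller only supplies schema and data and listens for change events (which JSONForms emits as `{ data, errors }`):

    <script setup lang="ts">
    import JsonForms from '@/forms/JsonForms.vue';
    import { ref } from 'vue';

    const data = ref<Record<string, unknown>>({});
    const schema = { type: 'object', properties: { name: { type: 'string' } } };

    function onFormChange(event: { data: Record<string, unknown> }) {
      data.value = event.data; // keep local state in sync with the form
    }
    </script>

    <template>
      <JsonForms :schema="schema" :data="data" @change="onFormChange" />
    </template>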
@@ -5,14 +5,14 @@ import { type UISchemaElement } from '@jsonforms/core';
import { rendererProps, useJsonFormsRenderer } from '@jsonforms/vue';
import { computed, ref, watchEffect } from 'vue';

// Define a type for our specific Label UI Schema
interface LabelUISchema extends UISchemaElement {
// Define a type for our specific Label UI Schema using intersection type
type LabelUISchema = UISchemaElement & {
  text?: string;
  options?: {
    description?: string;
    format?: 'title' | 'heading' | 'documentation' | string; // Add other formats as needed
  };
}
};

const props = defineProps(rendererProps<UISchemaElement>());

@@ -59,7 +59,6 @@ const onChange = (value: unknown) => {
        </TooltipContent>
      </Tooltip>
    </TooltipProvider>

    <SelectItem v-else :value="option.value">
      <SelectItemText>{{ option.label }}</SelectItemText>
    </SelectItem>
@@ -13,10 +13,9 @@ import {
    type JsonFormsSubStates,
    type JsonSchema,
    type Layout,
    type UISchemaElement,
} from '@jsonforms/core';
import { DispatchRenderer, useJsonFormsLayout, type RendererProps } from '@jsonforms/vue';
import { computed, inject, ref, type Ref } from 'vue';
import { computed, inject, nextTick, onMounted, ref, type Ref } from 'vue';

// Define props based on RendererProps<Layout>
const props = defineProps<RendererProps<Layout>>();
@@ -48,15 +47,30 @@ const numSteps = computed(() => stepsConfig.value.length);
// --- Current Step Logic --- Use injected core.data
const currentStep = computed(() => {
    const stepData = core!.data?.configStep;
    // Handle both the new object format and the old number format

    // Return current step if properly initialized
    if (typeof stepData === 'object' && stepData !== null && typeof stepData.current === 'number') {
        // Ensure step is within bounds
        return Math.max(0, Math.min(stepData.current, numSteps.value - 1));
    }
    // Fallback for initial state or old number format
    const numericStep = typeof stepData === 'number' ? stepData : 0;
    return Math.max(0, Math.min(numericStep, numSteps.value - 1));

    // Return 0 as default if not initialized yet
    return 0;
});

// Initialize configStep on mount
onMounted(async () => {
    // Wait for next tick to ensure form data is available
    await nextTick();

    const stepData = core!.data?.configStep;

    // Only initialize if configStep doesn't exist or is in wrong format
    if (!stepData || typeof stepData !== 'object' || typeof stepData.current !== 'number') {
        const initialStep = { current: 0, total: numSteps.value };
        dispatch(Actions.update('configStep', () => initialStep));
    }
});

const isLastStep = computed(() => numSteps.value > 0 && currentStep.value === numSteps.value - 1);

// --- Step Update Logic ---
@@ -71,15 +85,30 @@ const updateStep = (newStep: number) => {
    dispatch(Actions.update('configStep', () => ({ current: newStep, total })));
};

// --- Type guard for elements with step options ---
interface ElementWithStep {
    options?: {
        step?: number;
        [key: string]: unknown;
    };
}

function hasStepOption(element: unknown): element is ElementWithStep {
    return (
        element != null &&
        typeof element === 'object' &&
        'options' in element &&
        typeof element.options === 'object' &&
        element.options !== null &&
        typeof (element.options as { step?: number }).step === 'number'
    );
}

// --- Filtered Elements for Current Step ---
const currentStepElements = computed(() => {
    const filtered = (props.uischema.elements || []).filter((element: UISchemaElement) => {
        // Check if the element has an 'options' object and a 'step' property
        return (
            typeof element.options === 'object' &&
            element.options !== null &&
            element.options.step === currentStep.value
        );
    const elements = props.uischema.elements || [];
    const filtered = elements.filter((element) => {
        return hasStepOption(element) && element.options!.step === currentStep.value;
    });
    return filtered;
});
@@ -142,15 +171,12 @@ const getStepState = (stepIndex: number): StepState => {
        />
      </StepperItem>
    </Stepper>

    <!-- Render elements for the current step -->
    <!-- Added key to force re-render on step change, ensuring correct elements display -->
    <div class="current-step-content rounded-md border p-4 shadow" :key="`step-content-${currentStep}`">
      <DispatchRenderer
        v-for="(element, index) in currentStepElements"
        :key="`${layout.path}-${index}-step-${currentStep}`"
        :schema="props.schema as JsonSchema"
        :uischema="element as UISchemaElement"
        :schema="props.schema"
        :uischema="element"
        :path="layout.path || ''"
        :renderers="layout.renderers"
        :cells="layout.cells"
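Note: illustrative only. The `hasStepOption` guard added in the stepper refactor above narrows `unknown` to `ElementWithStep`, which is what makes the non-null assertion in `currentStepElements` type-safe:

    const el: unknown = { options: { step: 1 } };
    if (hasStepOption(el)) {
        // here el.options is known to exist and options.step is a number
        console.log(el.options!.step); // 1
    }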
@@ -41,6 +41,7 @@ const placeholder = computed(() => control.value.uischema?.options?.placeholder

<template>
  <div class="space-y-4">
    <p v-if="control.description" v-html="control.description" />
    <div v-for="(item, index) in items" :key="index" class="flex gap-2">
      <Input
        :type="inputType"
@@ -39,7 +39,7 @@ const elements = computed(() => {
  <template v-for="(element, _i) in elements" :key="_i">
    <DispatchRenderer
      :schema="layout.layout.value.schema"
      :uischema="element"
      :uischema="element as any"
      :path="layout.layout.value.path"
      :enabled="layout.layout.value.enabled"
      :renderers="layout.layout.value.renderers"

@@ -43,7 +43,7 @@ const elements = computed(() => {
  <template v-for="(element, _i) in elements" :key="_i">
    <DispatchRenderer
      :schema="layout.layout.value.schema"
      :uischema="element"
      :uischema="element as any"
      :path="layout.layout.value.path"
      :enabled="layout.layout.value.enabled"
      :renderers="layout.layout.value.renderers"
7
unraid-ui/src/forms/jsonforms.d.ts
vendored
@@ -1,7 +0,0 @@
import '@jsonforms/core';

declare module '@jsonforms/core' {
  export interface UISchemaElement {
    label?: string;
  }
}
@@ -74,6 +74,10 @@ export const jsonFormsRenderers: JsonFormsRendererRegistryEntry[] = [
    renderer: markRaw(withErrorWrapper(switchRenderer)),
    tester: rankWith(4, and(isBooleanControl, optionIs('format', 'toggle'))),
  },
  {
    renderer: markRaw(withErrorWrapper(switchRenderer)),
    tester: rankWith(4, and(isBooleanControl, optionIs('format', 'checkbox'))),
  },
  {
    renderer: markRaw(withErrorWrapper(selectRenderer)),
    tester: rankWith(4, and(isEnumControl)),
12
vitest.config.ts
Normal file
@@ -0,0 +1,12 @@
import { defineConfig } from 'vitest/config'

export default defineConfig({
  test: {
    projects: [
      "./api/vite.config.ts",
      "./plugin/vitest.config.ts",
      "./unraid-ui/vitest.config.ts",
      "./web/vitest.config.mjs"
    ]
  }
})
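Note: this root config replaces the `vitest.workspace` file removed in the next diff. Because the root package.json change above maps "test" to plain `vitest`, running `pnpm test` at the repo root now executes all four project suites through this single config instead of recursing into each package.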
@@ -1,8 +0,0 @@
import { defineWorkspace } from 'vitest/config'

export default defineWorkspace([
  "./plugin/vitest.config.ts",
  "./api/vite.config.ts",
  "./web/vitest.config.mjs",
  "./unraid-ui/vitest.config.ts"
])
133
web/components/Backup/BackupEntry.vue
Normal file
@@ -0,0 +1,133 @@
<script setup lang="ts">
import type { BackupJobsQuery } from '~/composables/gql/graphql';
import { BackupJobStatus } from '~/composables/gql/graphql';
import { useFragment } from '~/composables/gql/fragment-masking';
import { JOB_STATUS_FRAGMENT } from './backup-jobs.query';
import { computed } from 'vue';

interface Props {
  job: NonNullable<BackupJobsQuery['backup']>['jobs'][0];
}

const props = defineProps<Props>();

const jobData = useFragment(JOB_STATUS_FRAGMENT, props.job);

// Determine job status based on job properties
const jobStatus = computed(() => {
  if (jobData.status) {
    return jobData.status;
  }
  if (jobData.error) return BackupJobStatus.FAILED;
  if (jobData.endTime && !jobData.error) return BackupJobStatus.COMPLETED;
  if (jobData.endTime && jobData.error) return BackupJobStatus.FAILED;
  return BackupJobStatus.RUNNING;
});

const statusColor = computed(() => {
  switch (jobStatus.value) {
    case BackupJobStatus.FAILED:
    case BackupJobStatus.CANCELLED:
      return 'red';
    case BackupJobStatus.COMPLETED:
      return 'green';
    case BackupJobStatus.RUNNING:
    default:
      return 'blue';
  }
});

const statusText = computed(() => {
  switch (jobStatus.value) {
    case BackupJobStatus.FAILED:
      return 'Error';
    case BackupJobStatus.CANCELLED:
      return 'Cancelled';
    case BackupJobStatus.COMPLETED:
      return 'Completed';
    case BackupJobStatus.RUNNING:
    default:
      return 'Running';
  }
});
</script>

<template>
  <div class="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6 shadow-sm">
    <div class="flex items-center justify-between mb-4">
      <div class="flex items-center space-x-3">
        <div class="flex-shrink-0">
          <div
            :class="[
              'w-3 h-3 rounded-full',
              statusColor === 'green' ? 'bg-green-400' : statusColor === 'red' ? 'bg-red-400' : 'bg-blue-400',
              jobStatus === BackupJobStatus.RUNNING ? 'animate-pulse' : ''
            ]"
          ></div>
        </div>
        <div>
          <h3 class="text-lg font-medium text-gray-900 dark:text-white">
            {{ jobData.name || 'Backup Job' }}
          </h3>
          <div class="text-sm text-gray-500 dark:text-gray-400 space-y-1">
            <p>Job ID: {{ jobData.id }}</p>
            <p v-if="jobData.externalJobId">External Job ID: {{ jobData.externalJobId }}</p>
            <p>Status: {{ statusText }}</p>
            <p v-if="jobData.message" class="text-gray-600 dark:text-gray-300">{{ jobData.message }}</p>
            <p v-if="jobData.error" class="text-red-600 dark:text-red-400">Error: {{ jobData.error }}</p>
          </div>
        </div>
      </div>
      <span
        :class="`inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium ${statusColor === 'green' ? 'bg-green-100 text-green-800 dark:bg-green-900/20 dark:text-green-400' : statusColor === 'red' ? 'bg-red-100 text-red-800 dark:bg-red-900/20 dark:text-red-400' : 'bg-blue-100 text-blue-800 dark:bg-blue-900/20 dark:text-blue-400'}`"
      >
        {{ statusText }}
      </span>
    </div>

    <div class="grid grid-cols-1 md:grid-cols-3 gap-4">
      <div v-if="jobData.formattedBytesTransferred" class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
        <dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Bytes Transferred</dt>
        <dd class="mt-1 text-sm text-gray-900 dark:text-white">
          {{ jobData.formattedBytesTransferred }}
        </dd>
      </div>

      <div v-if="jobData.formattedSpeed" class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
        <dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Transfer Speed</dt>
        <dd class="mt-1 text-sm text-gray-900 dark:text-white">{{ jobData.formattedSpeed }}</dd>
      </div>

      <div v-if="jobData.formattedElapsedTime" class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
        <dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Elapsed Time</dt>
        <dd class="mt-1 text-sm text-gray-900 dark:text-white">
          {{ jobData.formattedElapsedTime }}
        </dd>
      </div>

      <div v-if="jobData.formattedEta" class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
        <dt class="text-sm font-medium text-gray-500 dark:text-gray-400">ETA</dt>
        <dd class="mt-1 text-sm text-gray-900 dark:text-white">
          {{ jobData.formattedEta }}
        </dd>
      </div>

      <div v-if="jobData.progress !== null && jobData.progress !== undefined" class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
        <dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Progress</dt>
        <dd class="mt-1 text-sm text-gray-900 dark:text-white">{{ Math.round(jobData.progress) }}%</dd>
      </div>
    </div>

    <div v-if="jobData.progress !== null && jobData.progress !== undefined" class="mt-4">
      <div class="w-full bg-gray-200 rounded-full h-2 dark:bg-gray-700">
        <div
          :class="[
            'h-2 rounded-full transition-all duration-300',
            statusColor === 'green' ? 'bg-green-600' : statusColor === 'red' ? 'bg-red-600' : 'bg-blue-600'
          ]"
          :style="{ width: `${Math.round(jobData.progress)}%` }"
        ></div>
      </div>
    </div>
  </div>
</template>
web/components/Backup/BackupJobConfig.vue (new file, 118 lines)
@@ -0,0 +1,118 @@
<script setup lang="ts">
|
||||
import { ref, computed } from 'vue';
|
||||
import { useQuery } from '@vue/apollo-composable';
|
||||
|
||||
import { Button, Sheet, SheetContent, SheetTitle, Spinner } from '@unraid/ui';
|
||||
|
||||
import { BACKUP_JOB_CONFIGS_LIST_QUERY } from '~/components/Backup/backup-jobs.query';
|
||||
import BackupJobConfigForm from '~/components/Backup/BackupJobConfigForm.vue';
|
||||
import BackupJobItem from '~/components/Backup/BackupJobItem.vue';
|
||||
|
||||
const showConfigModal = ref(false);
|
||||
const currentEditingConfigId = ref<string | null>(null);
|
||||
|
||||
const { result, loading, error, refetch } = useQuery(
|
||||
BACKUP_JOB_CONFIGS_LIST_QUERY,
|
||||
{},
|
||||
{
|
||||
fetchPolicy: 'cache-and-network',
|
||||
pollInterval: 50000, // Much slower polling since we only need the list of configs
|
||||
}
|
||||
);
|
||||
|
||||
const backupConfigIds = computed(() => {
|
||||
return result.value?.backup?.configs?.map((config) => config.id) || [];
|
||||
});
|
||||
|
||||
function handleJobDeleted() {
|
||||
refetch();
|
||||
}
|
||||
|
||||
function openAddJobModal() {
|
||||
currentEditingConfigId.value = null;
|
||||
showConfigModal.value = true;
|
||||
}
|
||||
|
||||
function openEditJobModal(configId: string) {
|
||||
currentEditingConfigId.value = configId;
|
||||
showConfigModal.value = true;
|
||||
}
|
||||
|
||||
function onConfigComplete() {
|
||||
showConfigModal.value = false;
|
||||
currentEditingConfigId.value = null;
|
||||
refetch();
|
||||
}
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="backup-config">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<h2 class="text-xl font-bold text-gray-900 dark:text-white">Scheduled Backup Jobs</h2>
|
||||
<Button variant="primary" @click="openAddJobModal"> Add Backup Job </Button>
|
||||
</div>
|
||||
|
||||
<div v-if="loading && !result" class="text-center py-8">
|
||||
<Spinner class="mx-auto" />
|
||||
<p class="mt-2 text-gray-600 dark:text-gray-400">Loading backup configurations...</p>
|
||||
</div>
|
||||
|
||||
<div
|
||||
v-else-if="error"
|
||||
class="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4"
|
||||
>
|
||||
<div class="flex">
|
||||
<div class="ml-3">
|
||||
<h3 class="text-sm font-medium text-red-800 dark:text-red-200">
|
||||
Error loading backup configurations
|
||||
</h3>
|
||||
<div class="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||
{{ error.message }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div v-else-if="!backupConfigIds?.length" class="text-center py-12">
|
||||
<div class="text-gray-400 dark:text-gray-600 mb-4">
|
||||
<svg class="w-12 h-12 mx-auto" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
stroke-width="2"
|
||||
d="M8 7V3a4 4 0 118 0v4m-4 8l-4-4 4-4m0 8h8a2 2 0 002-2V5a2 2 0 00-2-2H4a2 2 0 002 2v10a2 2 0 002 2z"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white mb-2">No backup jobs configured</h3>
|
||||
<p class="text-gray-600 dark:text-gray-400 mb-4">
|
||||
Create your first scheduled backup job to automatically protect your data.
|
||||
</p>
|
||||
<Button variant="primary" @click="openAddJobModal"> Create First Backup Job </Button>
|
||||
</div>
|
||||
|
||||
<div v-else class="space-y-4">
|
||||
<BackupJobItem
|
||||
v-for="configId in backupConfigIds"
|
||||
:key="configId"
|
||||
:config-id="configId"
|
||||
@deleted="handleJobDeleted"
|
||||
@edit="openEditJobModal"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Sheet v-model:open="showConfigModal">
|
||||
<SheetContent class="w-full max-w-4xl max-h-[90vh] overflow-auto">
|
||||
<SheetTitle class="text-xl font-semibold text-gray-900 dark:text-white mb-4">
|
||||
{{ currentEditingConfigId ? 'Edit Backup Job' : 'Add New Backup Job' }}
|
||||
</SheetTitle>
|
||||
<div class="p-6">
|
||||
<BackupJobConfigForm
|
||||
:config-id="currentEditingConfigId"
|
||||
@complete="onConfigComplete"
|
||||
@cancel="showConfigModal = false" />
|
||||
</div>
|
||||
</SheetContent>
|
||||
</Sheet>
|
||||
</div>
|
||||
</template>
|
||||
web/components/Backup/BackupJobConfigForm.vue (new file, 362 lines)
@@ -0,0 +1,362 @@
<script lang="ts" setup>
|
||||
import { computed, provide, ref, watch } from 'vue';
|
||||
import { useMutation, useQuery } from '@vue/apollo-composable';
|
||||
|
||||
import { Button, JsonForms } from '@unraid/ui';
|
||||
|
||||
import type {
|
||||
BackupJobConfig,
|
||||
CreateBackupJobConfigInput,
|
||||
UpdateBackupJobConfigInput,
|
||||
} from '~/composables/gql/graphql';
|
||||
import type { Ref } from 'vue';
|
||||
|
||||
import {
|
||||
BACKUP_JOB_CONFIG_FORM_QUERY,
|
||||
BACKUP_JOB_CONFIG_QUERY,
|
||||
CREATE_BACKUP_JOB_CONFIG_MUTATION,
|
||||
UPDATE_BACKUP_JOB_CONFIG_MUTATION,
|
||||
} from './backup-jobs.query';
|
||||
|
||||
// Define props
|
||||
const props = defineProps<{
|
||||
configId?: string | null;
|
||||
}>();
|
||||
|
||||
// Define emit events
|
||||
const emit = defineEmits<{
|
||||
complete: [];
|
||||
cancel: [];
|
||||
}>();
|
||||
|
||||
// Define types for form state
|
||||
interface ConfigStep {
|
||||
current: number;
|
||||
total: number;
|
||||
}
|
||||
|
||||
// Determine if we are in edit mode
|
||||
const isEditMode = computed(() => !!props.configId);
|
||||
|
||||
// Form state
|
||||
const formState: Ref<Record<string, unknown>> = ref({}); // Using unknown for now due to dynamic nature of JsonForms data
|
||||
|
||||
// Get form schema
|
||||
const {
|
||||
result: formResult,
|
||||
loading: formLoading,
|
||||
refetch: updateFormSchema,
|
||||
} = useQuery(BACKUP_JOB_CONFIG_FORM_QUERY, () => ({
|
||||
input: {
|
||||
showAdvanced:
|
||||
typeof formState.value?.showAdvanced === 'boolean' ? formState.value.showAdvanced : false,
|
||||
},
|
||||
}));
|
||||
|
||||
// Fetch existing config data if in edit mode
|
||||
const {
|
||||
result: existingConfigResult,
|
||||
loading: existingConfigLoading,
|
||||
onError: onExistingConfigError,
|
||||
refetch: refetchExistingConfig,
|
||||
} = useQuery(
|
||||
BACKUP_JOB_CONFIG_QUERY,
|
||||
() => ({ id: props.configId! }),
|
||||
() => ({
|
||||
enabled: isEditMode.value,
|
||||
fetchPolicy: 'network-only',
|
||||
})
|
||||
);
|
||||
|
||||
onExistingConfigError((err) => {
|
||||
console.error('Error fetching existing backup job config:', err);
|
||||
if (window.toast) {
|
||||
window.toast.error('Failed to load backup job data for editing.');
|
||||
}
|
||||
});
|
||||
|
||||
watch(
|
||||
existingConfigResult,
|
||||
(newVal) => {
|
||||
if (newVal?.backupJobConfig && isEditMode.value) {
|
||||
const config = newVal.backupJobConfig as BackupJobConfig;
|
||||
|
||||
const {
|
||||
__typename,
|
||||
sourceConfig: fetchedSourceConfig,
|
||||
destinationConfig: fetchedDestinationConfig,
|
||||
schedule,
|
||||
...baseConfigFields
|
||||
} = config;
|
||||
|
||||
const populatedDataForForm: Record<string, unknown> = {
|
||||
...baseConfigFields,
|
||||
schedule: schedule,
|
||||
};
|
||||
|
||||
if (fetchedSourceConfig) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { __typename: st, ...sourceData } = fetchedSourceConfig as Record<string, unknown>;
|
||||
populatedDataForForm.sourceConfig = sourceData;
|
||||
if (typeof sourceData.type === 'string') {
|
||||
populatedDataForForm.sourceType = sourceData.type;
|
||||
}
|
||||
}
|
||||
|
||||
if (fetchedDestinationConfig) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { __typename: dt, ...destData } = fetchedDestinationConfig as Record<string, unknown>;
|
||||
populatedDataForForm.destinationConfig = destData;
|
||||
if (typeof destData.type === 'string') {
|
||||
populatedDataForForm.destinationType = destData.type;
|
||||
}
|
||||
}
|
||||
|
||||
const finalFormData = { ...(formState.value || {}), ...populatedDataForForm };
|
||||
|
||||
const cleanedFormData: Record<string, unknown> = {};
|
||||
for (const key in finalFormData) {
|
||||
if (
|
||||
Object.prototype.hasOwnProperty.call(finalFormData, key) &&
|
||||
finalFormData[key] !== undefined
|
||||
) {
|
||||
cleanedFormData[key] = finalFormData[key];
|
||||
}
|
||||
}
|
||||
|
||||
formState.value = cleanedFormData;
|
||||
console.log('[BackupJobConfigForm] Populated formState with existing data:', formState.value);
|
||||
}
|
||||
},
|
||||
{ immediate: true, deep: true }
|
||||
);
|
||||
|
||||
// Watch for changes to showAdvanced and refetch schema
|
||||
let refetchTimeout: NodeJS.Timeout | null = null;
|
||||
watch(
|
||||
formState,
|
||||
async (newValue, oldValue) => {
|
||||
const newStepCurrent = (newValue?.configStep as ConfigStep)?.current ?? 0;
|
||||
const oldStepCurrent = (oldValue?.configStep as ConfigStep)?.current ?? 0;
|
||||
const newShowAdvanced = typeof newValue?.showAdvanced === 'boolean' ? newValue.showAdvanced : false;
|
||||
const oldShowAdvanced = typeof oldValue?.showAdvanced === 'boolean' ? oldValue.showAdvanced : false;
|
||||
const shouldRefetch = newShowAdvanced !== oldShowAdvanced || newStepCurrent !== oldStepCurrent;
|
||||
|
||||
if (shouldRefetch) {
|
||||
if (newShowAdvanced !== oldShowAdvanced) {
|
||||
console.log('[BackupJobConfigForm] showAdvanced changed:', newShowAdvanced);
|
||||
}
|
||||
if (newStepCurrent !== oldStepCurrent) {
|
||||
console.log(
|
||||
'[BackupJobConfigForm] configStep.current changed:',
|
||||
newStepCurrent,
|
||||
'from:',
|
||||
oldStepCurrent,
|
||||
'Refetching schema.'
|
||||
);
|
||||
}
|
||||
|
||||
if (refetchTimeout) {
|
||||
clearTimeout(refetchTimeout);
|
||||
}
|
||||
|
||||
refetchTimeout = setTimeout(async () => {
|
||||
await updateFormSchema({
|
||||
input: {
|
||||
showAdvanced: newShowAdvanced,
|
||||
},
|
||||
});
|
||||
refetchTimeout = null;
|
||||
}, 100);
|
||||
}
|
||||
},
|
||||
{ deep: true }
|
||||
);
|
||||
|
||||
/**
|
||||
* Form submission and mutation handling
|
||||
*/
|
||||
const {
|
||||
mutate: createBackupJobConfig,
|
||||
loading: isCreating,
|
||||
error: createError,
|
||||
onDone: onCreateDone,
|
||||
} = useMutation(CREATE_BACKUP_JOB_CONFIG_MUTATION);
|
||||
|
||||
const {
|
||||
mutate: updateBackupJobConfig,
|
||||
loading: isUpdating,
|
||||
error: updateError,
|
||||
onDone: onUpdateDone,
|
||||
} = useMutation(UPDATE_BACKUP_JOB_CONFIG_MUTATION);
|
||||
|
||||
const isLoading = computed(
|
||||
() =>
|
||||
isCreating.value ||
|
||||
isUpdating.value ||
|
||||
formLoading.value ||
|
||||
(isEditMode.value && existingConfigLoading.value)
|
||||
);
|
||||
const mutationError = computed(() => createError.value || updateError.value);
|
||||
|
||||
// Handle form submission
|
||||
const submitForm = async () => {
|
||||
try {
|
||||
// Remove form-specific state like configStep or showAdvanced before submission.
|
||||
// Also remove sourceType and destinationType as they are likely derived for the UI
|
||||
// and the mutation probably expects type information within the nested config objects.
|
||||
const {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
configStep,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
showAdvanced,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
sourceType, // Destructure to exclude from inputData
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
destinationType, // Destructure to exclude from inputData
|
||||
...mutationInputData // Contains name, enabled, schedule, sourceConfig (obj), destinationConfig (obj)
|
||||
} = formState.value;
|
||||
|
||||
// The mutationInputData should now align with CreateBackupJobConfigInput / UpdateBackupJobConfigInput
|
||||
// which expect nested sourceConfig and destinationConfig.
|
||||
const finalPayload = mutationInputData as unknown;
|
||||
|
||||
if (isEditMode.value && props.configId) {
|
||||
await updateBackupJobConfig({
|
||||
id: props.configId,
|
||||
// The `input` here should strictly match UpdateBackupJobConfigInput
|
||||
input: finalPayload as UpdateBackupJobConfigInput,
|
||||
});
|
||||
} else {
|
||||
await createBackupJobConfig({
|
||||
// The `input` here should strictly match CreateBackupJobConfigInput
|
||||
input: finalPayload as CreateBackupJobConfigInput,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error ${isEditMode.value ? 'updating' : 'creating'} backup job config:`, error);
|
||||
}
|
||||
};
|
||||
|
||||
// Handle successful creation/update
|
||||
const onMutationSuccess = (isUpdate: boolean) => {
|
||||
if (window.toast) {
|
||||
window.toast.success(`Backup Job ${isUpdate ? 'Updated' : 'Created'}`, {
|
||||
description: `Successfully ${isUpdate ? 'updated' : 'created'} backup job "${formState.value?.name as string}"`,
|
||||
});
|
||||
}
|
||||
console.log(`[BackupJobConfigForm] on${isUpdate ? 'Update' : 'Create'}Done`);
|
||||
formState.value = {};
|
||||
emit('complete');
|
||||
};
|
||||
|
||||
onCreateDone(() => onMutationSuccess(false));
|
||||
onUpdateDone(() => onMutationSuccess(true));
|
||||
|
||||
const parsedOriginalErrorMessage = computed(() => {
|
||||
const originalError = mutationError.value?.graphQLErrors?.[0]?.extensions?.originalError;
|
||||
if (
|
||||
originalError &&
|
||||
typeof originalError === 'object' &&
|
||||
originalError !== null &&
|
||||
'message' in originalError
|
||||
) {
|
||||
return (originalError as { message: string | string[] }).message;
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
|
||||
let changeTimeout: NodeJS.Timeout | null = null;
|
||||
const onChange = ({ data }: { data: Record<string, unknown> }) => {
|
||||
if (changeTimeout) {
|
||||
clearTimeout(changeTimeout);
|
||||
}
|
||||
|
||||
changeTimeout = setTimeout(() => {
|
||||
console.log('[BackupJobConfigForm] onChange', data);
|
||||
changeTimeout = null;
|
||||
}, 300);
|
||||
|
||||
formState.value = data;
|
||||
};
|
||||
|
||||
provide('submitForm', submitForm);
|
||||
provide('isSubmitting', isLoading);
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div
|
||||
class="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 shadow-sm"
|
||||
>
|
||||
<div class="p-6">
|
||||
<h2 class="text-xl font-medium mb-4 text-gray-900 dark:text-white">
|
||||
{{ isEditMode ? 'Edit Backup Job' : 'Configure Backup Job' }}
|
||||
</h2>
|
||||
|
||||
<div
|
||||
v-if="mutationError"
|
||||
class="mb-4 p-4 bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 text-red-700 dark:text-red-300 rounded-md"
|
||||
>
|
||||
<p>{{ mutationError.message }}</p>
|
||||
<ul v-if="Array.isArray(parsedOriginalErrorMessage)" class="list-disc list-inside mt-2">
|
||||
<li v-for="(msg, index) in parsedOriginalErrorMessage" :key="index">{{ msg }}</li>
|
||||
</ul>
|
||||
<p
|
||||
v-else-if="
|
||||
typeof parsedOriginalErrorMessage === 'string' && parsedOriginalErrorMessage.length > 0
|
||||
"
|
||||
class="mt-2"
|
||||
>
|
||||
{{ parsedOriginalErrorMessage }}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div
|
||||
v-if="isLoading || (isEditMode && existingConfigLoading && !formResult)"
|
||||
class="py-8 text-center text-gray-500 dark:text-gray-400"
|
||||
>
|
||||
{{
|
||||
isEditMode && existingConfigLoading
|
||||
? 'Loading existing job data...'
|
||||
: 'Loading configuration form...'
|
||||
}}
|
||||
</div>
|
||||
|
||||
<!-- Form -->
|
||||
<div v-else-if="formResult?.backupJobConfigForm" class="mt-6 [&_.vertical-layout]:space-y-6">
|
||||
<JsonForms
|
||||
v-if="formResult?.backupJobConfigForm"
|
||||
:schema="formResult.backupJobConfigForm.dataSchema"
|
||||
:uischema="formResult.backupJobConfigForm.uiSchema"
|
||||
:data="formState"
|
||||
:readonly="isLoading"
|
||||
@change="onChange"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div
|
||||
v-else-if="!isLoading && !formResult?.backupJobConfigForm"
|
||||
class="py-8 text-center text-gray-500 dark:text-gray-400"
|
||||
>
|
||||
Could not load form configuration. Please try again.
|
||||
<Button
|
||||
v-if="isEditMode"
|
||||
variant="link"
|
||||
@click="
|
||||
async () => {
|
||||
await refetchExistingConfig?.();
|
||||
await updateFormSchema();
|
||||
}
|
||||
"
|
||||
>
|
||||
Retry loading data
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style lang="postcss">
|
||||
/* Import unraid-ui globals first */
|
||||
@import '@unraid/ui/styles';
|
||||
</style>
|
||||
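
Both `refetchTimeout` and `changeTimeout` above hand-roll the same trailing-edge debounce. A sketch of factoring that into a shared helper; `debounce` is a hypothetical name, not part of this diff:

// Minimal trailing-edge debounce matching the two call sites above.
function debounce<A extends unknown[]>(fn: (...args: A) => void, waitMs: number) {
  let timer: ReturnType<typeof setTimeout> | null = null;
  return (...args: A) => {
    if (timer) clearTimeout(timer);
    timer = setTimeout(() => {
      timer = null;
      fn(...args);
    }, waitMs);
  };
}

// Usage sketch: would replace the manual timeout bookkeeping in the schema watcher.
const debouncedSchemaRefetch = debounce((showAdvanced: boolean) => {
  void updateFormSchema({ input: { showAdvanced } });
}, 100);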
web/components/Backup/BackupJobItem.vue (new file, 508 lines)
@@ -0,0 +1,508 @@
<script setup lang="ts">
|
||||
import { computed, ref, watch } from 'vue';
|
||||
import { useMutation, useQuery } from '@vue/apollo-composable';
|
||||
|
||||
import { PlayIcon, StopIcon, TrashIcon, PencilIcon } from '@heroicons/vue/24/solid';
|
||||
import { Badge, Button, Switch } from '@unraid/ui';
|
||||
|
||||
import {
|
||||
BACKUP_JOB_CONFIG_FRAGMENT,
|
||||
BACKUP_JOB_CONFIG_QUERY,
|
||||
BACKUP_JOB_CONFIG_WITH_CURRENT_JOB_FRAGMENT,
|
||||
JOB_STATUS_FRAGMENT,
|
||||
SOURCE_CONFIG_FRAGMENT,
|
||||
STOP_BACKUP_JOB_MUTATION,
|
||||
TOGGLE_BACKUP_JOB_CONFIG_MUTATION,
|
||||
TRIGGER_BACKUP_JOB_MUTATION,
|
||||
DELETE_BACKUP_JOB_CONFIG_MUTATION,
|
||||
} from '~/components/Backup/backup-jobs.query';
|
||||
import { useFragment } from '~/composables/gql/fragment-masking';
|
||||
import { BackupJobStatus } from '~/composables/gql/graphql';
|
||||
import type {SourceConfigFragment} from '~/composables/gql/graphql';
|
||||
|
||||
interface Props {
|
||||
configId: string;
|
||||
}
|
||||
|
||||
const props = defineProps<Props>();
|
||||
|
||||
// Validate configId prop
|
||||
if (!props.configId) {
|
||||
console.warn('BackupJobItem: configId prop is required but not provided');
|
||||
}
|
||||
|
||||
const emit = defineEmits(['deleted', 'edit']);
|
||||
|
||||
const isToggling = ref(false);
|
||||
const isTriggering = ref(false);
|
||||
const showDeleteConfirm = ref(false);
|
||||
|
||||
// Add reactive variables for the query
|
||||
const queryVariables = computed(() => ({ id: props.configId }));
|
||||
|
||||
const { result, loading, error, refetch } = useQuery(BACKUP_JOB_CONFIG_QUERY, queryVariables, {
|
||||
fetchPolicy: 'network-only',
|
||||
pollInterval: 5000,
|
||||
errorPolicy: 'all', // Show partial data even if there are errors
|
||||
});
|
||||
|
||||
// Add debugging to see what's happening
|
||||
watch(
|
||||
[result, error, loading],
|
||||
([newResult, newError, newLoading]) => {
|
||||
console.log('BackupJobItem query state:', {
|
||||
configId: props.configId,
|
||||
loading: newLoading,
|
||||
error: newError,
|
||||
result: newResult,
|
||||
backupJobConfig: newResult?.backupJobConfig,
|
||||
});
|
||||
},
|
||||
{ immediate: true }
|
||||
);
|
||||
|
||||
// Watch for configId changes and refetch
|
||||
watch(
|
||||
() => props.configId,
|
||||
(newConfigId) => {
|
||||
if (newConfigId) {
|
||||
console.log('ConfigId changed, refetching:', newConfigId);
|
||||
refetch();
|
||||
// Reset delete confirmation when configId changes
|
||||
showDeleteConfirm.value = false;
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const { mutate: toggleJobConfig } = useMutation(TOGGLE_BACKUP_JOB_CONFIG_MUTATION);
|
||||
const { mutate: triggerJob } = useMutation(TRIGGER_BACKUP_JOB_MUTATION);
|
||||
const { mutate: stopJob } = useMutation(STOP_BACKUP_JOB_MUTATION);
|
||||
const { mutate: deleteJobConfig, loading: isDeletingJob } = useMutation(
|
||||
DELETE_BACKUP_JOB_CONFIG_MUTATION
|
||||
);
|
||||
|
||||
const configWithJob = computed(() => {
|
||||
if (!result.value?.backupJobConfig) {
|
||||
console.log('No backupJobConfig in result:', result.value);
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const config = useFragment(
|
||||
BACKUP_JOB_CONFIG_WITH_CURRENT_JOB_FRAGMENT,
|
||||
result.value.backupJobConfig
|
||||
);
|
||||
const baseConfig = useFragment(BACKUP_JOB_CONFIG_FRAGMENT, config);
|
||||
const currentJob = computed(() =>
|
||||
config.currentJob
|
||||
? useFragment(JOB_STATUS_FRAGMENT, config.currentJob)
|
||||
: null
|
||||
);
|
||||
const sourceConfig = baseConfig.sourceConfig
|
||||
? useFragment(SOURCE_CONFIG_FRAGMENT, baseConfig.sourceConfig)
|
||||
: undefined;
|
||||
|
||||
return {
|
||||
...baseConfig,
|
||||
sourceConfig,
|
||||
runningJob: currentJob.value,
|
||||
errorMessage: currentJob.value?.error || undefined,
|
||||
isRunning: currentJob.value ? currentJob.value.status === BackupJobStatus.RUNNING : false,
|
||||
hasRecentJob: !!currentJob.value,
|
||||
sourcePath: getSourcePath(sourceConfig),
|
||||
};
|
||||
} catch (fragmentError) {
|
||||
console.error('Error processing fragments:', fragmentError);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
function getSourcePath(sourceConfig: SourceConfigFragment | undefined): string {
|
||||
if (!sourceConfig) return '';
|
||||
|
||||
if (sourceConfig.__typename === 'RawBackupConfig') {
|
||||
return sourceConfig.sourcePath || '';
|
||||
} else if (sourceConfig.__typename === 'FlashPreprocessConfig') {
|
||||
return sourceConfig.flashPath || '';
|
||||
} else if (sourceConfig.__typename === 'ScriptPreprocessConfig') {
|
||||
return sourceConfig.scriptPath || '';
|
||||
} else if (sourceConfig.__typename === 'ZfsPreprocessConfig') {
|
||||
return `${sourceConfig.poolName}/${sourceConfig.datasetName}`;
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
function formatDate(dateString: string): string {
|
||||
return new Date(dateString).toLocaleString();
|
||||
}
|
||||
|
||||
async function handleToggleJob() {
|
||||
if (!configWithJob.value || isToggling.value) return;
|
||||
|
||||
isToggling.value = true;
|
||||
try {
|
||||
await toggleJobConfig({ id: configWithJob.value.id });
|
||||
} catch (error) {
|
||||
console.error('Failed to toggle job:', error);
|
||||
} finally {
|
||||
isToggling.value = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleTriggerOrStopJob() {
|
||||
if (!configWithJob.value || isTriggering.value) return;
|
||||
|
||||
isTriggering.value = true;
|
||||
try {
|
||||
if (configWithJob.value.isRunning && configWithJob.value.runningJob?.id) {
|
||||
const result = await stopJob({ id: configWithJob.value.runningJob.id });
|
||||
if (result?.data?.backup?.stopBackupJob?.status) {
|
||||
console.log('Backup job stopped:', result.data.backup.stopBackupJob);
|
||||
}
|
||||
} else {
|
||||
const result = await triggerJob({ id: configWithJob.value.id });
|
||||
if (result?.data?.backup?.triggerJob?.jobId) {
|
||||
console.log('Backup job triggered:', result.data.backup.triggerJob);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to trigger/stop backup job:', error);
|
||||
} finally {
|
||||
isTriggering.value = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleDeleteJob() {
|
||||
if (!configWithJob.value || isDeletingJob.value) return;
|
||||
|
||||
try {
|
||||
const result = await deleteJobConfig({ id: configWithJob.value.id });
|
||||
if (result?.data?.backup?.deleteBackupJobConfig) {
|
||||
console.log('Backup job config deleted:', configWithJob.value.id);
|
||||
emit('deleted', configWithJob.value.id);
|
||||
showDeleteConfirm.value = false;
|
||||
} else {
|
||||
console.error('Failed to delete backup job config, no confirmation in result:', result);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error deleting backup job config:', error);
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div
|
||||
v-if="loading"
|
||||
class="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6 shadow-sm animate-pulse"
|
||||
>
|
||||
<div class="flex items-center justify-between mb-4">
|
||||
<div class="flex items-center space-x-3">
|
||||
<div class="w-3 h-3 bg-gray-300 rounded-full"></div>
|
||||
<div class="space-y-2">
|
||||
<div class="h-4 bg-gray-300 rounded w-32"></div>
|
||||
<div class="h-3 bg-gray-300 rounded w-48"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center space-x-3">
|
||||
<div class="h-6 bg-gray-300 rounded w-16"></div>
|
||||
<div class="h-8 bg-gray-300 rounded w-20"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
v-else-if="error"
|
||||
class="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4"
|
||||
>
|
||||
<p class="text-red-700 dark:text-red-300">Error loading backup job: {{ error.message }}</p>
|
||||
</div>
|
||||
|
||||
<div
|
||||
v-else-if="!loading && !configWithJob"
|
||||
class="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4"
|
||||
>
|
||||
<p class="text-yellow-700 dark:text-yellow-300">
|
||||
Backup job configuration not found (ID: {{ configId }})
|
||||
</p>
|
||||
<button
|
||||
class="mt-2 text-sm text-yellow-600 dark:text-yellow-400 hover:text-yellow-800 dark:hover:text-yellow-200 underline"
|
||||
@click="refetch()"
|
||||
>
|
||||
Retry loading
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div
|
||||
v-else-if="configWithJob"
|
||||
class="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6 shadow-sm relative"
|
||||
>
|
||||
<!-- Delete Confirmation Dialog -->
|
||||
<div
|
||||
v-if="showDeleteConfirm"
|
||||
class="absolute inset-0 z-10 bg-white/80 dark:bg-gray-800/80 flex flex-col items-center justify-center p-6 rounded-lg"
|
||||
>
|
||||
<p class="text-lg font-medium text-gray-900 dark:text-white mb-4 text-center">
|
||||
Are you sure you want to delete this backup job?
|
||||
</p>
|
||||
<p class="text-sm text-gray-600 dark:text-gray-400 mb-6 text-center">
|
||||
This action cannot be undone.
|
||||
</p>
|
||||
<div class="flex space-x-3">
|
||||
<Button variant="outline" size="sm" @click="showDeleteConfirm = false"> Cancel </Button>
|
||||
<Button
|
||||
variant="destructive"
|
||||
size="sm"
|
||||
:loading="isDeletingJob"
|
||||
@click="handleDeleteJob"
|
||||
>
|
||||
Delete
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-between mb-4">
|
||||
<div class="flex items-center space-x-3">
|
||||
<div class="flex-shrink-0">
|
||||
<div
|
||||
:class="[
|
||||
'w-3 h-3 rounded-full',
|
||||
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
|
||||
? 'bg-green-400'
|
||||
: configWithJob.errorMessage ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
|
||||
? 'bg-red-400'
|
||||
: configWithJob.isRunning
|
||||
? 'bg-blue-400 animate-pulse'
|
||||
: configWithJob.enabled
|
||||
? 'bg-green-400'
|
||||
: 'bg-gray-400',
|
||||
]"
|
||||
></div>
|
||||
</div>
|
||||
<div>
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white">
|
||||
{{ configWithJob.name }}
|
||||
<span v-if="configWithJob.isRunning" class="text-sm text-blue-600 dark:text-blue-400 ml-2">
|
||||
(Running)
|
||||
</span>
|
||||
<span
|
||||
v-else-if="configWithJob.runningJob?.status === BackupJobStatus.COMPLETED"
|
||||
class="text-sm text-green-600 dark:text-green-400 ml-2"
|
||||
>
|
||||
(Completed)
|
||||
</span>
|
||||
<span
|
||||
v-else-if="configWithJob.errorMessage"
|
||||
class="text-sm text-red-600 dark:text-red-400 ml-2"
|
||||
>
|
||||
({{ configWithJob.runningJob?.status }})
|
||||
</span>
|
||||
</h3>
|
||||
<p class="text-sm text-gray-500 dark:text-gray-400">
|
||||
{{ configWithJob.sourcePath }}
|
||||
<span
|
||||
v-if="configWithJob.sourceType && configWithJob.sourceType !== 'RAW'"
|
||||
class="ml-2 inline-flex items-center px-2 py-0.5 rounded text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900/20 dark:text-blue-400"
|
||||
>
|
||||
{{ configWithJob.sourceType }}
|
||||
</span>
|
||||
</p>
|
||||
<p v-if="configWithJob.errorMessage" class="text-sm text-red-600 dark:text-red-400 mt-1">
|
||||
Error: {{ configWithJob.errorMessage }}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center space-x-3">
|
||||
<div class="flex items-center space-x-2">
|
||||
<span class="text-sm text-gray-600 dark:text-gray-400">
|
||||
{{ configWithJob.enabled ? 'Enabled' : 'Disabled' }}
|
||||
</span>
|
||||
<Switch
|
||||
:model-value="configWithJob.enabled"
|
||||
:disabled="isToggling"
|
||||
@update:model-value="handleToggleJob"
|
||||
/>
|
||||
</div>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
:loading="isTriggering"
|
||||
:disabled="isToggling"
|
||||
@click="handleTriggerOrStopJob"
|
||||
>
|
||||
<span class="sr-only">{{
|
||||
configWithJob.isRunning ? 'Stop Backup Job' : 'Trigger Backup Job'
|
||||
}}</span>
|
||||
<StopIcon v-if="configWithJob.isRunning" class="h-5 w-5" />
|
||||
<PlayIcon v-else class="h-5 w-5" />
|
||||
</Button>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
:disabled="isToggling || configWithJob.isRunning"
|
||||
@click="emit('edit', configId)"
|
||||
>
|
||||
<span class="sr-only">Edit Backup Job</span>
|
||||
<PencilIcon class="h-5 w-5" />
|
||||
</Button>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
class="text-red-600 hover:text-red-700 dark:text-red-500 dark:hover:text-red-400 border-red-600 hover:border-red-700 dark:border-red-500 dark:hover:border-red-400"
|
||||
:disabled="isToggling || configWithJob.isRunning"
|
||||
@click="showDeleteConfirm = true"
|
||||
>
|
||||
<span class="sr-only">Delete Backup Job</span>
|
||||
<TrashIcon class="h-5 w-5" />
|
||||
</Button>
|
||||
<Badge
|
||||
:variant="
|
||||
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
|
||||
? 'green'
|
||||
: configWithJob.errorMessage ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
|
||||
? 'red'
|
||||
: configWithJob.isRunning
|
||||
? 'blue'
|
||||
: configWithJob.enabled
|
||||
? 'green'
|
||||
: 'gray'
|
||||
"
|
||||
size="sm"
|
||||
>
|
||||
{{
|
||||
configWithJob.hasRecentJob && configWithJob.runningJob?.status
|
||||
? configWithJob.runningJob.status.charAt(0).toUpperCase() +
|
||||
configWithJob.runningJob.status.slice(1).toLowerCase()
|
||||
: configWithJob.enabled
|
||||
? 'Active'
|
||||
: 'Inactive'
|
||||
}}
|
||||
</Badge>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Progress information for running or recently completed jobs -->
|
||||
<div
|
||||
v-if="configWithJob.hasRecentJob && configWithJob.runningJob"
|
||||
:class="[
|
||||
'mb-4 border rounded-lg p-4',
|
||||
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
|
||||
? 'bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800'
|
||||
: configWithJob.errorMessage ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
|
||||
? 'bg-red-50 dark:bg-red-900/20 border-red-200 dark:border-red-800'
|
||||
: 'bg-blue-50 dark:bg-blue-900/20 border-blue-200 dark:border-blue-800',
|
||||
]"
|
||||
>
|
||||
<div
|
||||
:class="[
|
||||
'flex justify-between text-sm mb-3',
|
||||
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
|
||||
? 'text-green-700 dark:text-green-300'
|
||||
: configWithJob.errorMessage ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
|
||||
? 'text-red-700 dark:text-red-300'
|
||||
: 'text-blue-700 dark:text-blue-300',
|
||||
]"
|
||||
>
|
||||
<span class="font-medium">{{ configWithJob.runningJob.progress }}% complete</span>
|
||||
<span>{{ configWithJob.runningJob.formattedSpeed || 'N/A' }}</span>
|
||||
</div>
|
||||
<div
|
||||
:class="[
|
||||
'w-full rounded-full h-2 mb-3',
|
||||
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
|
||||
? 'bg-green-200 dark:bg-green-700'
|
||||
: configWithJob.errorMessage ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
|
||||
? 'bg-red-200 dark:bg-red-700'
|
||||
: 'bg-blue-200 dark:bg-blue-700',
|
||||
]"
|
||||
>
|
||||
<div
|
||||
:class="[
|
||||
'h-2 rounded-full transition-all duration-300',
|
||||
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
|
||||
? 'bg-green-600'
|
||||
: configWithJob.errorMessage ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
|
||||
? 'bg-red-600'
|
||||
: 'bg-blue-600',
|
||||
]"
|
||||
:style="{ width: `${configWithJob.runningJob.progress}%` }"
|
||||
></div>
|
||||
</div>
|
||||
<div
|
||||
:class="[
|
||||
'grid grid-cols-2 md:grid-cols-4 gap-4 text-sm',
|
||||
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
|
||||
? 'text-green-700 dark:text-green-300'
|
||||
: configWithJob.errorMessage ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
|
||||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
|
||||
? 'text-red-700 dark:text-red-300'
|
||||
: 'text-blue-700 dark:text-blue-300',
|
||||
]"
|
||||
>
|
||||
<div>
|
||||
<span class="font-medium">Transferred:</span> {{ configWithJob.runningJob.formattedBytesTransferred || 'N/A' }}
|
||||
</div>
|
||||
<div>
|
||||
<span class="font-medium">Elapsed:</span> {{ configWithJob.runningJob.formattedElapsedTime || 'N/A' }}
|
||||
</div>
|
||||
<div v-if="configWithJob.runningJob?.status === BackupJobStatus.RUNNING">
|
||||
<span class="font-medium">ETA:</span> {{ configWithJob.runningJob.formattedEta || 'N/A' }}
|
||||
</div>
|
||||
<div v-else>
|
||||
<span class="font-medium">Status:</span>
|
||||
{{
|
||||
configWithJob.runningJob?.status
|
||||
? configWithJob.runningJob.status.charAt(0).toUpperCase() +
|
||||
configWithJob.runningJob.status.slice(1).toLowerCase()
|
||||
: 'Unknown'
|
||||
}}
|
||||
</div>
|
||||
<div><span class="font-medium">Total:</span> {{ configWithJob.runningJob.totalBytes ? (configWithJob.runningJob.totalBytes / (1024 * 1024 * 1024)).toFixed(2) + ' GB' : 'N/A' }}</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Schedule and status information -->
|
||||
<div class="grid grid-cols-1 md:grid-cols-4 gap-4">
|
||||
<div class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
|
||||
<dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Schedule</dt>
|
||||
<dd class="mt-1 text-sm text-gray-900 dark:text-white">
|
||||
{{ configWithJob.schedule }}
|
||||
</dd>
|
||||
</div>
|
||||
|
||||
<div class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
|
||||
<dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Backup Type</dt>
|
||||
<dd class="mt-1 text-sm text-gray-900 dark:text-white">
|
||||
{{ configWithJob.sourceConfig?.label || 'Unknown Backup Type' }}
|
||||
</dd>
|
||||
</div>
|
||||
|
||||
<div class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
|
||||
<dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Last Run</dt>
|
||||
<dd class="mt-1 text-sm text-gray-900 dark:text-white">
|
||||
{{ configWithJob.lastRunAt ? formatDate(configWithJob.lastRunAt) : 'Never' }}
|
||||
</dd>
|
||||
</div>
|
||||
|
||||
<div class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
|
||||
<dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Status</dt>
|
||||
<dd class="mt-1 text-sm text-gray-900 dark:text-white">
|
||||
{{ configWithJob.isRunning ? 'Running' : configWithJob.lastRunStatus || 'Not run yet' }}
|
||||
</dd>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
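
The status dot, the progress panel, and the `Badge` above all repeat the same four-way COMPLETED/failed/running/enabled ternary. A sketch of centralizing that decision; `jobTone` is a hypothetical helper, not part of this diff:

import { BackupJobStatus } from '~/composables/gql/graphql';

type JobTone = 'green' | 'red' | 'blue' | 'gray';

// One source of truth for the tone the template currently derives inline
// in several :class bindings.
function jobTone(opts: {
  status?: BackupJobStatus | null;
  hasError: boolean;
  isRunning: boolean;
  enabled: boolean;
}): JobTone {
  if (opts.status === BackupJobStatus.COMPLETED) return 'green';
  if (
    opts.hasError ||
    opts.status === BackupJobStatus.CANCELLED ||
    opts.status === BackupJobStatus.FAILED
  ) {
    return 'red';
  }
  if (opts.isRunning) return 'blue';
  return opts.enabled ? 'green' : 'gray';
}

Each binding could then look its classes up from a small record keyed by JobTone instead of repeating the chain (a lookup keeps Tailwind's static class detection happy, unlike interpolated class names).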
web/components/Backup/BackupOverview.vue (new file, 105 lines)
@@ -0,0 +1,105 @@
<script setup lang="ts">
|
||||
import { ref } from 'vue';
|
||||
import { useQuery } from '@vue/apollo-composable';
|
||||
|
||||
import type { JobStatusFragment } from '~/composables/gql/graphql';
|
||||
|
||||
import { useFragment } from '~/composables/gql';
|
||||
import { BACKUP_JOBS_QUERY, JOB_STATUS_FRAGMENT } from './backup-jobs.query';
|
||||
import BackupEntry from './BackupEntry.vue';
|
||||
import BackupJobConfig from './BackupJobConfig.vue';
|
||||
|
||||
const showSystemJobs = ref(false);
|
||||
|
||||
const { result, loading, error, refetch } = useQuery(
|
||||
BACKUP_JOBS_QUERY,
|
||||
{},
|
||||
{
|
||||
fetchPolicy: 'cache-and-network',
|
||||
pollInterval: 5000, // Poll every 5 seconds for real-time updates
|
||||
}
|
||||
);
|
||||
|
||||
const jobs = computed<JobStatusFragment[]>(() => {
|
||||
return result.value?.backup?.jobs.map((job) => useFragment(JOB_STATUS_FRAGMENT, job)) || [];
|
||||
});
|
||||
|
||||
// Enhanced refresh function that forces a network request
|
||||
const refreshJobs = async () => {
|
||||
await refetch();
|
||||
};
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="backup-overview">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<h1 class="text-2xl font-bold text-gray-900 dark:text-white">Backup Management</h1>
|
||||
<button
|
||||
class="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 transition-colors"
|
||||
:disabled="loading"
|
||||
@click="refreshJobs"
|
||||
>
|
||||
{{ loading ? 'Refreshing...' : 'Refresh' }}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- Backup Job Configurations -->
|
||||
<BackupJobConfig />
|
||||
|
||||
<!-- Running Backup Jobs Section -->
|
||||
<div class="mt-8">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<h2 class="text-xl font-bold text-gray-900 dark:text-white">Running Backup Jobs</h2>
|
||||
<div class="flex items-center space-x-3">
|
||||
<label class="relative inline-flex items-center cursor-pointer">
|
||||
<input v-model="showSystemJobs" type="checkbox" class="sr-only peer" />
|
||||
<div
|
||||
class="w-11 h-6 bg-gray-200 peer-focus:outline-none peer-focus:ring-4 peer-focus:ring-blue-300 dark:peer-focus:ring-blue-800 rounded-full peer dark:bg-gray-700 peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-5 after:w-5 after:transition-all dark:border-gray-600 peer-checked:bg-blue-600"
|
||||
></div>
|
||||
<span class="ml-3 text-sm font-medium text-gray-900 dark:text-gray-300">
|
||||
Show system jobs
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div v-if="loading && !result" class="text-center py-8">
|
||||
<div class="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-600 mx-auto"></div>
|
||||
<p class="mt-2 text-gray-600 dark:text-gray-400">Loading backup jobs...</p>
|
||||
</div>
|
||||
|
||||
<div
|
||||
v-else-if="error"
|
||||
class="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4"
|
||||
>
|
||||
<div class="flex">
|
||||
<div class="ml-3">
|
||||
<h3 class="text-sm font-medium text-red-800 dark:text-red-200">Error loading backup jobs</h3>
|
||||
<div class="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||
{{ error.message }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div v-else-if="!jobs?.length" class="text-center py-12">
|
||||
<div class="text-gray-400 dark:text-gray-600 mb-4">
|
||||
<svg class="w-12 h-12 mx-auto" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
stroke-width="2"
|
||||
d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white mb-2">No backup jobs running</h3>
|
||||
<p class="text-gray-600 dark:text-gray-400">There are currently no active backup operations.</p>
|
||||
</div>
|
||||
|
||||
<div v-else class="space-y-4">
|
||||
<BackupEntry v-for="job in jobs" :key="job.id" :job="job" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
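
Since `showSystemJobs` is not wired to any filtering in this diff, a sketch of what client-side filtering could look like; it assumes a hypothetical naming convention for system jobs, because the JobStatus fragment carries no explicit flag:

// Hypothetical: filter on a naming convention, since JobStatus has no
// isSystem field in this schema.
const visibleJobs = computed(() =>
  showSystemJobs.value ? jobs.value : jobs.value.filter((job) => !job.name?.startsWith('system:'))
);

The v-for would then iterate `visibleJobs` instead of `jobs`.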
web/components/Backup/backup-jobs.query.ts (new file, 228 lines)
@@ -0,0 +1,228 @@
import { graphql } from '~/composables/gql/gql';

export const JOB_STATUS_FRAGMENT = graphql(/* GraphQL */ `
  fragment JobStatus on JobStatus {
    id
    externalJobId
    name
    status
    progress
    message
    error
    startTime
    endTime
    bytesTransferred
    totalBytes
    speed
    elapsedTime
    eta
    formattedBytesTransferred
    formattedSpeed
    formattedElapsedTime
    formattedEta
  }
`);

export const SOURCE_CONFIG_FRAGMENT = graphql(/* GraphQL */ `
  fragment SourceConfig on SourceConfigUnion {
    ... on ZfsPreprocessConfig {
      label
      poolName
      datasetName
      snapshotPrefix
      cleanupSnapshots
      retainSnapshots
    }
    ... on FlashPreprocessConfig {
      label
      flashPath
      includeGitHistory
      additionalPaths
    }
    ... on ScriptPreprocessConfig {
      label
      scriptPath
      scriptArgs
      workingDirectory
      environment
      outputPath
    }
    ... on RawBackupConfig {
      label
      sourcePath
      excludePatterns
      includePatterns
    }
  }
`);

export const DESTINATION_CONFIG_FRAGMENT = graphql(/* GraphQL */ `
  fragment DestinationConfig on DestinationConfigUnion {
    ... on RcloneDestinationConfig {
      type
      remoteName
      destinationPath
      rcloneOptions
    }
  }
`);

export const BACKUP_JOB_CONFIG_FRAGMENT = graphql(/* GraphQL */ `
  fragment BackupJobConfig on BackupJobConfig {
    id
    name
    sourceType
    destinationType
    schedule
    enabled
    sourceConfig {
      ...SourceConfig
    }
    destinationConfig {
      ...DestinationConfig
    }
    createdAt
    updatedAt
    lastRunAt
    lastRunStatus
  }
`);

export const BACKUP_JOB_CONFIG_WITH_CURRENT_JOB_FRAGMENT = graphql(/* GraphQL */ `
  fragment BackupJobConfigWithCurrentJob on BackupJobConfig {
    ...BackupJobConfig
    currentJob {
      ...JobStatus
    }
  }
`);

export const BACKUP_JOBS_QUERY = graphql(/* GraphQL */ `
  query BackupJobs {
    backup {
      id
      jobs {
        ...JobStatus
      }
    }
  }
`);

export const BACKUP_JOB_QUERY = graphql(/* GraphQL */ `
  query BackupJob($id: PrefixedID!) {
    backupJob(id: $id) {
      ...JobStatus
    }
  }
`);

export const BACKUP_JOB_CONFIG_QUERY = graphql(/* GraphQL */ `
  query BackupJobConfig($id: PrefixedID!) {
    backupJobConfig(id: $id) {
      ...BackupJobConfigWithCurrentJob
    }
  }
`);

export const BACKUP_JOB_CONFIGS_QUERY = graphql(/* GraphQL */ `
  query BackupJobConfigs {
    backup {
      id
      configs {
        ...BackupJobConfigWithCurrentJob
      }
    }
  }
`);

export const BACKUP_JOB_CONFIGS_LIST_QUERY = graphql(/* GraphQL */ `
  query BackupJobConfigsList {
    backup {
      id
      configs {
        id
        name
      }
    }
  }
`);

export const BACKUP_JOB_CONFIG_FORM_QUERY = graphql(/* GraphQL */ `
  query BackupJobConfigForm($input: BackupJobConfigFormInput) {
    backupJobConfigForm(input: $input) {
      id
      dataSchema
      uiSchema
    }
  }
`);

export const CREATE_BACKUP_JOB_CONFIG_MUTATION = graphql(/* GraphQL */ `
  mutation CreateBackupJobConfig($input: CreateBackupJobConfigInput!) {
    backup {
      createBackupJobConfig(input: $input) {
        ...BackupJobConfig
      }
    }
  }
`);

export const UPDATE_BACKUP_JOB_CONFIG_MUTATION = graphql(/* GraphQL */ `
  mutation UpdateBackupJobConfig($id: PrefixedID!, $input: UpdateBackupJobConfigInput!) {
    backup {
      updateBackupJobConfig(id: $id, input: $input) {
        ...BackupJobConfig
      }
    }
  }
`);

export const DELETE_BACKUP_JOB_CONFIG_MUTATION = graphql(/* GraphQL */ `
  mutation DeleteBackupJobConfig($id: PrefixedID!) {
    backup {
      deleteBackupJobConfig(id: $id)
    }
  }
`);

export const TOGGLE_BACKUP_JOB_CONFIG_MUTATION = graphql(/* GraphQL */ `
  mutation ToggleBackupJobConfig($id: PrefixedID!) {
    backup {
      toggleJobConfig(id: $id) {
        ...BackupJobConfig
      }
    }
  }
`);

export const TRIGGER_BACKUP_JOB_MUTATION = graphql(/* GraphQL */ `
  mutation TriggerBackupJob($id: PrefixedID!) {
    backup {
      triggerJob(id: $id) {
        jobId
      }
    }
  }
`);

export const STOP_BACKUP_JOB_MUTATION = graphql(/* GraphQL */ `
  mutation StopBackupJob($id: PrefixedID!) {
    backup {
      stopBackupJob(id: $id) {
        status
        jobId
      }
    }
  }
`);

export const INITIATE_BACKUP_MUTATION = graphql(/* GraphQL */ `
  mutation InitiateBackup($input: InitiateBackupInput!) {
    backup {
      initiateBackup(input: $input) {
        status
        jobId
      }
    }
  }
`);
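
A minimal sketch of consuming these documents from a component, mirroring how BackupJobItem.vue wires them up above:

import { useMutation } from '@vue/apollo-composable';

import { TRIGGER_BACKUP_JOB_MUTATION } from '~/components/Backup/backup-jobs.query';

const { mutate: triggerJob } = useMutation(TRIGGER_BACKUP_JOB_MUTATION);

// configId is a PrefixedID taken from the BackupJobConfigsList query results.
async function runNow(configId: string) {
  const result = await triggerJob({ id: configId });
  console.log('Started backup job:', result?.data?.backup?.triggerJob?.jobId);
}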
@@ -30,7 +30,7 @@ interface ConfigStep {

// Form state
const formState = ref(props.initialState || {
-  configStep: 0 as number | ConfigStep,
+  configStep: { current: 0, total: 0 },
  showAdvanced: false,
  name: '',
  type: '',
@@ -56,13 +56,8 @@ watch(
  formState,
  async (newValue, oldValue) => {
-    // Get current step as number for comparison
-    const newStep = typeof newValue.configStep === 'object'
-      ? (newValue.configStep as ConfigStep).current
-      : newValue.configStep as number;
-
-    const oldStep = typeof oldValue.configStep === 'object'
-      ? (oldValue.configStep as ConfigStep).current
-      : oldValue.configStep as number;
+    const newStep = (newValue.configStep as ConfigStep)?.current ?? 0;
+    const oldStep = (oldValue.configStep as ConfigStep)?.current ?? 0;

    // Check if we need to refetch
    const shouldRefetch =
@@ -138,7 +133,7 @@ onCreateDone(async ({ data }) => {

  // Reset form and emit complete event
  formState.value = {
-    configStep: 0,
+    configStep: { current: 0, total: 0 },
    showAdvanced: false,
    name: '',
    type: '',
@@ -179,8 +174,8 @@ const uiSchema = computed(() => formResult.value?.rclone?.configForm?.uiSchema);

// Handle both number and object formats of configStep
const getCurrentStep = computed(() => {
-  const step = formState.value.configStep;
-  return typeof step === 'object' ? (step as ConfigStep).current : step as number;
+  const step = formState.value.configStep as ConfigStep;
+  return step?.current ?? 0;
});

// Get total steps from UI schema

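The hunks above migrate `configStep` from a bare number to a `{ current, total }` object and read it with `(step as ConfigStep)?.current ?? 0`. A slightly more defensive variant that also tolerates pre-migration values persisted as plain numbers; this is a sketch, not code from the diff:

interface ConfigStep {
  current: number;
  total: number;
}

// Accepts the old numeric shape as well as the new object shape.
function currentStep(step: ConfigStep | number | undefined): number {
  return typeof step === 'number' ? step : (step?.current ?? 0);
}
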
@@ -10,7 +10,7 @@ import RCloneConfig from './RCloneConfig.vue';
import RemoteItem from './RemoteItem.vue';

interface FormState {
-  configStep: number;
+  configStep: { current: number; total: number };
  showAdvanced: boolean;
  name: string;
  type: string;
@@ -63,7 +63,7 @@ const openCryptModal = (remote: { name: string, type: string }) => {
  const entropy = Math.random().toString(36).substring(2, 8);
  selectedRemote.value = remote;
  initialFormState.value = {
-    configStep: 0,
+    configStep: { current: 0, total: 0 },
    showAdvanced: false,
    name: `${remote.name}-crypt-${entropy}`,
    type: 'crypt',

@@ -23,6 +23,24 @@ type Documents = {
    "\n mutation UpdateApiKey($input: UpdateApiKeyInput!) {\n apiKey {\n update(input: $input) {\n ...ApiKeyWithKey\n }\n }\n }\n": typeof types.UpdateApiKeyDocument,
    "\n mutation DeleteApiKey($input: DeleteApiKeyInput!) {\n apiKey {\n delete(input: $input)\n }\n }\n": typeof types.DeleteApiKeyDocument,
    "\n query ApiKeyMeta {\n apiKeyPossibleRoles\n apiKeyPossiblePermissions {\n resource\n actions\n }\n }\n": typeof types.ApiKeyMetaDocument,
    "\n fragment JobStatus on JobStatus {\n id\n externalJobId\n name\n status\n progress\n message\n error\n startTime\n endTime\n bytesTransferred\n totalBytes\n speed\n elapsedTime\n eta\n formattedBytesTransferred\n formattedSpeed\n formattedElapsedTime\n formattedEta\n }\n": typeof types.JobStatusFragmentDoc,
    "\n fragment SourceConfig on SourceConfigUnion {\n ... on ZfsPreprocessConfig {\n label\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n ... on FlashPreprocessConfig {\n label\n flashPath\n includeGitHistory\n additionalPaths\n }\n ... on ScriptPreprocessConfig {\n label\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n ... on RawBackupConfig {\n label\n sourcePath\n excludePatterns\n includePatterns\n }\n }\n": typeof types.SourceConfigFragmentDoc,
    "\n fragment DestinationConfig on DestinationConfigUnion {\n ... on RcloneDestinationConfig {\n type\n remoteName\n destinationPath\n rcloneOptions\n }\n }\n": typeof types.DestinationConfigFragmentDoc,
    "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourceType\n destinationType\n schedule\n enabled\n sourceConfig {\n ...SourceConfig\n }\n destinationConfig {\n ...DestinationConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n }\n": typeof types.BackupJobConfigFragmentDoc,
    "\n fragment BackupJobConfigWithCurrentJob on BackupJobConfig {\n ...BackupJobConfig\n currentJob {\n ...JobStatus\n }\n }\n": typeof types.BackupJobConfigWithCurrentJobFragmentDoc,
    "\n query BackupJobs {\n backup {\n id\n jobs {\n ...JobStatus\n }\n }\n }\n": typeof types.BackupJobsDocument,
    "\n query BackupJob($id: PrefixedID!) {\n backupJob(id: $id) {\n ...JobStatus\n }\n }\n": typeof types.BackupJobDocument,
    "\n query BackupJobConfig($id: PrefixedID!) {\n backupJobConfig(id: $id) {\n ...BackupJobConfigWithCurrentJob\n }\n }\n": typeof types.BackupJobConfigDocument,
    "\n query BackupJobConfigs {\n backup {\n id\n configs {\n ...BackupJobConfigWithCurrentJob\n }\n }\n }\n": typeof types.BackupJobConfigsDocument,
    "\n query BackupJobConfigsList {\n backup {\n id\n configs {\n id\n name\n }\n }\n }\n": typeof types.BackupJobConfigsListDocument,
    "\n query BackupJobConfigForm($input: BackupJobConfigFormInput) {\n backupJobConfigForm(input: $input) {\n id\n dataSchema\n uiSchema\n }\n }\n": typeof types.BackupJobConfigFormDocument,
    "\n mutation CreateBackupJobConfig($input: CreateBackupJobConfigInput!) {\n backup {\n createBackupJobConfig(input: $input) {\n ...BackupJobConfig\n }\n }\n }\n": typeof types.CreateBackupJobConfigDocument,
    "\n mutation UpdateBackupJobConfig($id: PrefixedID!, $input: UpdateBackupJobConfigInput!) {\n backup {\n updateBackupJobConfig(id: $id, input: $input) {\n ...BackupJobConfig\n }\n }\n }\n": typeof types.UpdateBackupJobConfigDocument,
    "\n mutation DeleteBackupJobConfig($id: PrefixedID!) {\n backup {\n deleteBackupJobConfig(id: $id)\n }\n }\n": typeof types.DeleteBackupJobConfigDocument,
    "\n mutation ToggleBackupJobConfig($id: PrefixedID!) {\n backup {\n toggleJobConfig(id: $id) {\n ...BackupJobConfig\n }\n }\n }\n": typeof types.ToggleBackupJobConfigDocument,
    "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n": typeof types.TriggerBackupJobDocument,
    "\n mutation StopBackupJob($id: PrefixedID!) {\n backup {\n stopBackupJob(id: $id) {\n status\n jobId\n }\n }\n }\n": typeof types.StopBackupJobDocument,
    "\n mutation InitiateBackup($input: InitiateBackupInput!) {\n backup {\n initiateBackup(input: $input) {\n status\n jobId\n }\n }\n }\n": typeof types.InitiateBackupDocument,
    "\n query Unified {\n settings {\n unified {\n id\n dataSchema\n uiSchema\n values\n }\n }\n }\n": typeof types.UnifiedDocument,
    "\n mutation UpdateConnectSettings($input: JSON!) {\n updateSettings(input: $input) {\n restartRequired\n values\n }\n }\n": typeof types.UpdateConnectSettingsDocument,
    "\n query LogFiles {\n logFiles {\n name\n path\n size\n modifiedAt\n }\n }\n": typeof types.LogFilesDocument,
@@ -61,6 +79,24 @@ const documents: Documents = {
|
||||
"\n mutation UpdateApiKey($input: UpdateApiKeyInput!) {\n apiKey {\n update(input: $input) {\n ...ApiKeyWithKey\n }\n }\n }\n": types.UpdateApiKeyDocument,
|
||||
"\n mutation DeleteApiKey($input: DeleteApiKeyInput!) {\n apiKey {\n delete(input: $input)\n }\n }\n": types.DeleteApiKeyDocument,
|
||||
"\n query ApiKeyMeta {\n apiKeyPossibleRoles\n apiKeyPossiblePermissions {\n resource\n actions\n }\n }\n": types.ApiKeyMetaDocument,
|
||||
"\n fragment JobStatus on JobStatus {\n id\n externalJobId\n name\n status\n progress\n message\n error\n startTime\n endTime\n bytesTransferred\n totalBytes\n speed\n elapsedTime\n eta\n formattedBytesTransferred\n formattedSpeed\n formattedElapsedTime\n formattedEta\n }\n": types.JobStatusFragmentDoc,
|
||||
"\n fragment SourceConfig on SourceConfigUnion {\n ... on ZfsPreprocessConfig {\n label\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n ... on FlashPreprocessConfig {\n label\n flashPath\n includeGitHistory\n additionalPaths\n }\n ... on ScriptPreprocessConfig {\n label\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n ... on RawBackupConfig {\n label\n sourcePath\n excludePatterns\n includePatterns\n }\n }\n": types.SourceConfigFragmentDoc,
|
||||
"\n fragment DestinationConfig on DestinationConfigUnion {\n ... on RcloneDestinationConfig {\n type\n remoteName\n destinationPath\n rcloneOptions\n }\n }\n": types.DestinationConfigFragmentDoc,
|
||||
"\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourceType\n destinationType\n schedule\n enabled\n sourceConfig {\n ...SourceConfig\n }\n destinationConfig {\n ...DestinationConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n }\n": types.BackupJobConfigFragmentDoc,
|
||||
"\n fragment BackupJobConfigWithCurrentJob on BackupJobConfig {\n ...BackupJobConfig\n currentJob {\n ...JobStatus\n }\n }\n": types.BackupJobConfigWithCurrentJobFragmentDoc,
|
||||
"\n query BackupJobs {\n backup {\n id\n jobs {\n ...JobStatus\n }\n }\n }\n": types.BackupJobsDocument,
|
||||
"\n query BackupJob($id: PrefixedID!) {\n backupJob(id: $id) {\n ...JobStatus\n }\n }\n": types.BackupJobDocument,
|
||||
"\n query BackupJobConfig($id: PrefixedID!) {\n backupJobConfig(id: $id) {\n ...BackupJobConfigWithCurrentJob\n }\n }\n": types.BackupJobConfigDocument,
|
||||
"\n query BackupJobConfigs {\n backup {\n id\n configs {\n ...BackupJobConfigWithCurrentJob\n }\n }\n }\n": types.BackupJobConfigsDocument,
|
||||
"\n query BackupJobConfigsList {\n backup {\n id\n configs {\n id\n name\n }\n }\n }\n": types.BackupJobConfigsListDocument,
|
||||
"\n query BackupJobConfigForm($input: BackupJobConfigFormInput) {\n backupJobConfigForm(input: $input) {\n id\n dataSchema\n uiSchema\n }\n }\n": types.BackupJobConfigFormDocument,
|
||||
"\n mutation CreateBackupJobConfig($input: CreateBackupJobConfigInput!) {\n backup {\n createBackupJobConfig(input: $input) {\n ...BackupJobConfig\n }\n }\n }\n": types.CreateBackupJobConfigDocument,
|
||||
"\n mutation UpdateBackupJobConfig($id: PrefixedID!, $input: UpdateBackupJobConfigInput!) {\n backup {\n updateBackupJobConfig(id: $id, input: $input) {\n ...BackupJobConfig\n }\n }\n }\n": types.UpdateBackupJobConfigDocument,
|
||||
"\n mutation DeleteBackupJobConfig($id: PrefixedID!) {\n backup {\n deleteBackupJobConfig(id: $id)\n }\n }\n": types.DeleteBackupJobConfigDocument,
|
||||
"\n mutation ToggleBackupJobConfig($id: PrefixedID!) {\n backup {\n toggleJobConfig(id: $id) {\n ...BackupJobConfig\n }\n }\n }\n": types.ToggleBackupJobConfigDocument,
|
||||
"\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n": types.TriggerBackupJobDocument,
|
||||
"\n mutation StopBackupJob($id: PrefixedID!) {\n backup {\n stopBackupJob(id: $id) {\n status\n jobId\n }\n }\n }\n": types.StopBackupJobDocument,
|
||||
"\n mutation InitiateBackup($input: InitiateBackupInput!) {\n backup {\n initiateBackup(input: $input) {\n status\n jobId\n }\n }\n }\n": types.InitiateBackupDocument,
"\n query Unified {\n settings {\n unified {\n id\n dataSchema\n uiSchema\n values\n }\n }\n }\n": types.UnifiedDocument,
"\n mutation UpdateConnectSettings($input: JSON!) {\n updateSettings(input: $input) {\n restartRequired\n values\n }\n }\n": types.UpdateConnectSettingsDocument,
"\n query LogFiles {\n logFiles {\n name\n path\n size\n modifiedAt\n }\n }\n": types.LogFilesDocument,
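Each key in the generated `documents` map above is the exact source text of one operation or fragment in the codebase; each value is the matching pre-parsed typed document node. The `graphql()` overloads below do nothing more than look the source string up in this map. Conceptually (a minimal sketch of the client-preset runtime, not the verbatim generated code):

// Sketch of the lookup behind the typed overloads. The real generated
// implementation also warns when the string is missing, which happens
// when codegen has not been re-run after an operation was edited.
export function graphql(source: string) {
    return (documents as Record<string, unknown>)[source] ?? {};
}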
@@ -140,6 +176,78 @@ export function graphql(source: "\n mutation DeleteApiKey($input: DeleteApiKeyI
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n query ApiKeyMeta {\n apiKeyPossibleRoles\n apiKeyPossiblePermissions {\n resource\n actions\n }\n }\n"): (typeof documents)["\n query ApiKeyMeta {\n apiKeyPossibleRoles\n apiKeyPossiblePermissions {\n resource\n actions\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n fragment JobStatus on JobStatus {\n id\n externalJobId\n name\n status\n progress\n message\n error\n startTime\n endTime\n bytesTransferred\n totalBytes\n speed\n elapsedTime\n eta\n formattedBytesTransferred\n formattedSpeed\n formattedElapsedTime\n formattedEta\n }\n"): (typeof documents)["\n fragment JobStatus on JobStatus {\n id\n externalJobId\n name\n status\n progress\n message\n error\n startTime\n endTime\n bytesTransferred\n totalBytes\n speed\n elapsedTime\n eta\n formattedBytesTransferred\n formattedSpeed\n formattedElapsedTime\n formattedEta\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n fragment SourceConfig on SourceConfigUnion {\n ... on ZfsPreprocessConfig {\n label\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n ... on FlashPreprocessConfig {\n label\n flashPath\n includeGitHistory\n additionalPaths\n }\n ... on ScriptPreprocessConfig {\n label\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n ... on RawBackupConfig {\n label\n sourcePath\n excludePatterns\n includePatterns\n }\n }\n"): (typeof documents)["\n fragment SourceConfig on SourceConfigUnion {\n ... on ZfsPreprocessConfig {\n label\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n ... on FlashPreprocessConfig {\n label\n flashPath\n includeGitHistory\n additionalPaths\n }\n ... on ScriptPreprocessConfig {\n label\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n ... on RawBackupConfig {\n label\n sourcePath\n excludePatterns\n includePatterns\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n fragment DestinationConfig on DestinationConfigUnion {\n ... on RcloneDestinationConfig {\n type\n remoteName\n destinationPath\n rcloneOptions\n }\n }\n"): (typeof documents)["\n fragment DestinationConfig on DestinationConfigUnion {\n ... on RcloneDestinationConfig {\n type\n remoteName\n destinationPath\n rcloneOptions\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourceType\n destinationType\n schedule\n enabled\n sourceConfig {\n ...SourceConfig\n }\n destinationConfig {\n ...DestinationConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n }\n"): (typeof documents)["\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourceType\n destinationType\n schedule\n enabled\n sourceConfig {\n ...SourceConfig\n }\n destinationConfig {\n ...DestinationConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n fragment BackupJobConfigWithCurrentJob on BackupJobConfig {\n ...BackupJobConfig\n currentJob {\n ...JobStatus\n }\n }\n"): (typeof documents)["\n fragment BackupJobConfigWithCurrentJob on BackupJobConfig {\n ...BackupJobConfig\n currentJob {\n ...JobStatus\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n query BackupJobs {\n backup {\n id\n jobs {\n ...JobStatus\n }\n }\n }\n"): (typeof documents)["\n query BackupJobs {\n backup {\n id\n jobs {\n ...JobStatus\n }\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n query BackupJob($id: PrefixedID!) {\n backupJob(id: $id) {\n ...JobStatus\n }\n }\n"): (typeof documents)["\n query BackupJob($id: PrefixedID!) {\n backupJob(id: $id) {\n ...JobStatus\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n query BackupJobConfig($id: PrefixedID!) {\n backupJobConfig(id: $id) {\n ...BackupJobConfigWithCurrentJob\n }\n }\n"): (typeof documents)["\n query BackupJobConfig($id: PrefixedID!) {\n backupJobConfig(id: $id) {\n ...BackupJobConfigWithCurrentJob\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n query BackupJobConfigs {\n backup {\n id\n configs {\n ...BackupJobConfigWithCurrentJob\n }\n }\n }\n"): (typeof documents)["\n query BackupJobConfigs {\n backup {\n id\n configs {\n ...BackupJobConfigWithCurrentJob\n }\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n query BackupJobConfigsList {\n backup {\n id\n configs {\n id\n name\n }\n }\n }\n"): (typeof documents)["\n query BackupJobConfigsList {\n backup {\n id\n configs {\n id\n name\n }\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n query BackupJobConfigForm($input: BackupJobConfigFormInput) {\n backupJobConfigForm(input: $input) {\n id\n dataSchema\n uiSchema\n }\n }\n"): (typeof documents)["\n query BackupJobConfigForm($input: BackupJobConfigFormInput) {\n backupJobConfigForm(input: $input) {\n id\n dataSchema\n uiSchema\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n mutation CreateBackupJobConfig($input: CreateBackupJobConfigInput!) {\n backup {\n createBackupJobConfig(input: $input) {\n ...BackupJobConfig\n }\n }\n }\n"): (typeof documents)["\n mutation CreateBackupJobConfig($input: CreateBackupJobConfigInput!) {\n backup {\n createBackupJobConfig(input: $input) {\n ...BackupJobConfig\n }\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n mutation UpdateBackupJobConfig($id: PrefixedID!, $input: UpdateBackupJobConfigInput!) {\n backup {\n updateBackupJobConfig(id: $id, input: $input) {\n ...BackupJobConfig\n }\n }\n }\n"): (typeof documents)["\n mutation UpdateBackupJobConfig($id: PrefixedID!, $input: UpdateBackupJobConfigInput!) {\n backup {\n updateBackupJobConfig(id: $id, input: $input) {\n ...BackupJobConfig\n }\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n mutation DeleteBackupJobConfig($id: PrefixedID!) {\n backup {\n deleteBackupJobConfig(id: $id)\n }\n }\n"): (typeof documents)["\n mutation DeleteBackupJobConfig($id: PrefixedID!) {\n backup {\n deleteBackupJobConfig(id: $id)\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n mutation ToggleBackupJobConfig($id: PrefixedID!) {\n backup {\n toggleJobConfig(id: $id) {\n ...BackupJobConfig\n }\n }\n }\n"): (typeof documents)["\n mutation ToggleBackupJobConfig($id: PrefixedID!) {\n backup {\n toggleJobConfig(id: $id) {\n ...BackupJobConfig\n }\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n"): (typeof documents)["\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n mutation StopBackupJob($id: PrefixedID!) {\n backup {\n stopBackupJob(id: $id) {\n status\n jobId\n }\n }\n }\n"): (typeof documents)["\n mutation StopBackupJob($id: PrefixedID!) {\n backup {\n stopBackupJob(id: $id) {\n status\n jobId\n }\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
export function graphql(source: "\n mutation InitiateBackup($input: InitiateBackupInput!) {\n backup {\n initiateBackup(input: $input) {\n status\n jobId\n }\n }\n }\n"): (typeof documents)["\n mutation InitiateBackup($input: InitiateBackupInput!) {\n backup {\n initiateBackup(input: $input) {\n status\n jobId\n }\n }\n }\n"];
/**
 * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
 */
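At a call site, the literal passed to `graphql()` must match one of the generated keys character-for-character (including whitespace), which is why these overloads are regenerated whenever an operation changes. A typical consumer looks roughly like this (illustrative only — the import path and the exact formatting of the literal are assumptions, not part of this diff):

// Hypothetical usage in a component's script.
import { graphql } from '~/composables/gql'; // assumed path to the generated module

const BACKUP_JOBS = graphql(/* GraphQL */ `
    query BackupJobs {
        backup {
            id
            jobs {
                ...JobStatus
            }
        }
    }
`);
// BACKUP_JOBS is a TypedDocumentNode, so a client call such as
// useQuery(BACKUP_JOBS) infers the result type automatically.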
File diff suppressed because one or more lines are too long
@@ -39,6 +39,29 @@ const isDefined = <T>(value: T | null | undefined): value is T => {
const defaultCacheConfig: InMemoryCacheConfig = {
    typePolicies: {
        Query: {
            fields: {
                backup: {
                    merge(_, incoming) {
                        return incoming;
                    },
                },
            },
        },
        Backup: {
            fields: {
                jobs: {
                    merge(_, incoming) {
                        return incoming;
                    },
                },
                configs: {
                    merge(_, incoming) {
                        return incoming;
                    },
                },
            },
        },
        Notifications: {
            fields: {
                list: {
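The merge functions above implement a replace-on-write policy: when a refetch writes `backup`, `jobs`, or `configs`, the incoming value overwrites the cached one instead of being merged, so jobs that disappeared server-side also disappear from the cache, and Apollo's "cache data may be lost" warning for unkeyed array fields is avoided. A self-contained equivalent (standard @apollo/client API; only the type and field names come from this diff):

import { InMemoryCache } from '@apollo/client/core';

const cache = new InMemoryCache({
    typePolicies: {
        Backup: {
            fields: {
                // Replace the cached arrays wholesale; stale entries must not survive.
                jobs: { merge: (_existing, incoming: unknown[]) => incoming },
                configs: { merge: (_existing, incoming: unknown[]) => incoming },
            },
        },
    },
});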
@@ -1,35 +1,17 @@
<template>
  <div class="text-black bg-white dark:text-white dark:bg-black">
    <ClientOnly>
      <div class="flex flex-row items-center justify-center gap-6 p-6 bg-white dark:bg-zinc-800">
        <template v-for="route in routes" :key="route.path">
          <NuxtLink
            :to="route.path"
            class="underline hover:no-underline focus:no-underline"
            active-class="text-orange"
          >
            {{ formatRouteName(route.name) }}
          </NuxtLink>
        </template>
        <ModalsCe />
      </div>
      <slot />
    </ClientOnly>
  </div>
</template>

<script setup>
import { computed, watch } from 'vue';
import { useRouter } from 'vue-router';
import { storeToRefs } from 'pinia';
import { ClientOnly, NuxtLink } from '#components';
import { NuxtLink } from '#components';
import { watch } from 'vue';
import ModalsCe from '~/components/Modals.ce.vue';
import { useThemeStore } from '~/store/theme';
import { storeToRefs } from 'pinia';
import { useFetch } from '#imports';

const router = useRouter();
const themeStore = useThemeStore();
const { theme } = storeToRefs(themeStore);

// Get routes from Nuxt's server-side route generation
const { data: routes } = await useFetch('/api/routes');

// Watch for theme changes (satisfies linter by using theme)
watch(
  theme,
@@ -40,13 +22,6 @@ watch(
  { immediate: true }
);

const routes = computed(() => {
  return router
    .getRoutes()
    .filter((route) => !route.path.includes(':') && route.path !== '/404' && route.name)
    .sort((a, b) => a.path.localeCompare(b.path));
});

function formatRouteName(name) {
  if (!name) return 'Home';
  // Convert route names like "web-components" to "Web Components"
@@ -58,6 +33,22 @@ function formatRouteName(name) {
}
</script>

<template>
  <div class="text-black bg-white dark:text-white dark:bg-black">
    <ClientOnly>
      <div class="flex flex-row items-center justify-center gap-6 p-6 bg-white dark:bg-zinc-800">
        <template v-for="route in routes" :key="route.path">
          <NuxtLink :to="route.path" class="underline hover:no-underline focus:no-underline" active-class="text-orange">
            {{ formatRouteName(route.name) }}
          </NuxtLink>
        </template>
        <ModalsCe />
      </div>
      <slot />
    </ClientOnly>
  </div>
</template>

<style lang="postcss">
/* Import theme styles */
@import '@unraid/ui/styles';
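The net effect of the layout change above: the nav's route list, previously computed on the client from `router.getRoutes()`, is now fetched during SSR via `useFetch('/api/routes')`. The endpoint itself is not shown in this diff; a plausible Nitro handler would be (hypothetical sketch — the real server/api/routes implementation may differ):

// server/api/routes.get.ts — hypothetical; defineEventHandler is auto-imported by Nitro.
export default defineEventHandler(() => [
    { path: '/', name: 'index' },
    { path: '/flash-backup', name: 'flash-backup' },
]);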
24
web/pages/flash-backup.vue
Normal file
@@ -0,0 +1,24 @@
<script setup>
import BackupOverview from '~/components/Backup/BackupOverview.vue';
import RCloneOverview from '~/components/RClone/RCloneOverview.vue';
import { useDummyServerStore } from '~/_data/serverState';

const { registerEntry } = useCustomElements();

useDummyServerStore();

onBeforeMount(() => {
  registerEntry('UnraidComponents');
});

onMounted(() => {
  document.cookie = 'unraid_session_cookie=mockusersession';
});
</script>

<template>
  <div>
    <BackupOverview />
    <RCloneOverview />
  </div>
</template>
Some files were not shown because too many files have changed in this diff.