diff --git a/.bivvy/m9X4-climb.md b/.bivvy/m9X4-climb.md new file mode 100644 index 000000000..51bd1bc08 --- /dev/null +++ b/.bivvy/m9X4-climb.md @@ -0,0 +1,1412 @@ + +
+ m9X4 + feature + Add preprocessing capabilities to backup jobs for ZFS pools, flash backups, Docker containers, and custom scripts +
+ None - will use existing system commands and utilities + None - extends existing backup system + + - api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts (main service) + - api/src/unraid-api/graph/resolvers/backup/backup.model.ts (GraphQL models) + - plugin/source/dynamix.unraid.net/usr/local/emhttp/plugins/dynamix.my.servers/include/UpdateFlashBackup.php (flash backup reference) + - web/components/Backup/ (UI components) + + + +## Feature Overview + +**Feature Name**: Backup Job Preprocessing System +**Purpose**: Enable backup jobs to run preprocessing steps before the actual backup operation, supporting specialized backup scenarios like ZFS snapshots, flash drive backups, Docker container management, and custom user scripts. + +**Problem Being Solved**: +- Current backup system only supports direct file/folder backups via rclone +- ZFS pools need snapshot creation before backup +- Flash drive backups require git repository compression +- Users need ability to run custom preparation scripts + +**Success Metrics**: +- Backup jobs can successfully execute preprocessing steps +- ZFS snapshot backups work reliably +- Flash backup integration functions correctly +- Docker container backup workflows complete without data corruption +- Custom scripts execute safely in isolated environments + +## Requirements + +### Functional Requirements + +**Core Preprocessing Types**: +1. **ZFS Snapshot**: Create ZFS snapshot, stream snapshot data directly to destination +2. **Flash Backup**: Compress git repository from /boot/.git and stream directly to destination +3. **Custom Script**: Execute user-provided script for custom preprocessing (non-streaming) +4. **None**: Direct backup (current behavior) + +**Preprocessing Workflow**: +1. Execute preprocessing step +2. For streaming operations: pipe data directly to rclone daemon via rcat +3. For non-streaming operations: update sourcePath to preprocessed location +4. Execute cleanup/postprocessing if required +5. 
Log all steps and handle errors gracefully + +**Configuration Options**: +- Preprocessing type selection +- Type-specific parameters (ZFS pool name, Docker container name, script path) +- Streaming vs file-based backup mode +- Timeout settings for preprocessing steps +- Cleanup behavior configuration + +### Technical Requirements + +**Performance**: Preprocessing should complete within reasonable timeframes (configurable timeouts) +**Security**: Custom scripts run in controlled environment with limited permissions +**Reliability**: Failed preprocessing should not leave system in inconsistent state +**Logging**: Comprehensive logging of all preprocessing steps +**Streaming**: Leverage rclone daemon's streaming capabilities for efficient data transfer + +### User Requirements + +**Configuration UI**: Simple dropdown to select preprocessing type with dynamic form fields +**Status Visibility**: Clear indication of preprocessing status in job monitoring +**Error Handling**: Meaningful error messages for preprocessing failures + +## Design and Implementation + +### Data Model Changes + +**Internal DTO Classes for Validation** (not exposed via GraphQL): +```typescript +import { + IsString, + IsOptional, + IsBoolean, + IsNumber, + IsArray, + IsEnum, + IsPositive, + Min, + Max, + ValidateNested, + IsNotEmpty, + Matches +} from 'class-validator'; +import { Type, Transform } from 'class-transformer'; + +export enum PreprocessType { + NONE = 'none', + ZFS = 'zfs', + FLASH = 'flash', + SCRIPT = 'script' +} + +export class ZfsPreprocessConfigDto { + @IsString() + @IsNotEmpty() + @Matches(/^[a-zA-Z0-9_\-\/]+$/, { message: 'Pool name must contain only alphanumeric characters, underscores, hyphens, and forward slashes' }) + poolName!: string; + + @IsOptional() + @IsString() + @Matches(/^[a-zA-Z0-9_\-]+$/, { message: 'Snapshot name must contain only alphanumeric characters, underscores, and hyphens' }) + snapshotName?: string; + + @IsOptional() + @IsBoolean() + @Transform(({ value 
}) => value !== false) + streamDirect?: boolean = true; + + @IsOptional() + @IsNumber() + @Min(1) + @Max(9) + compressionLevel?: number; +} + +export class FlashPreprocessConfigDto { + @IsOptional() + @IsString() + @Matches(/^\/[a-zA-Z0-9_\-\/\.]+$/, { message: 'Git path must be an absolute path' }) + gitPath?: string = '/boot/.git'; + + @IsOptional() + @IsNumber() + @Min(1) + @Max(9) + compressionLevel?: number; + + @IsOptional() + @IsBoolean() + @Transform(({ value }) => value !== false) + streamDirect?: boolean = true; + + @IsOptional() + @IsString() + @Matches(/^\/[a-zA-Z0-9_\-\/\.]+$/, { message: 'Local cache path must be an absolute path' }) + localCachePath?: string; + + @IsOptional() + @IsString() + @IsNotEmpty() + commitMessage?: string; + + @IsOptional() + @IsBoolean() + @Transform(({ value }) => value !== false) + includeGitHistory?: boolean = true; +} + +export class ScriptPreprocessConfigDto { + @IsString() + @IsNotEmpty() + @Matches(/^\/[a-zA-Z0-9_\-\/\.]+$/, { message: 'Script path must be an absolute path' }) + scriptPath!: string; + + @IsOptional() + @IsArray() + @IsString({ each: true }) + scriptArgs?: string[]; + + @IsOptional() + @IsString() + @Matches(/^\/[a-zA-Z0-9_\-\/\.]*$/, { message: 'Working directory must be an absolute path' }) + workingDirectory?: string; + + @IsOptional() + @IsNumber() + @IsPositive() + @Max(3600) + timeout?: number; +} + +export class PreprocessConfigDto { + @IsOptional() + @ValidateNested() + @Type(() => ZfsPreprocessConfigDto) + zfs?: ZfsPreprocessConfigDto; + + @IsOptional() + @ValidateNested() + @Type(() => FlashPreprocessConfigDto) + flash?: FlashPreprocessConfigDto; + + @IsOptional() + @ValidateNested() + @Type(() => ScriptPreprocessConfigDto) + script?: ScriptPreprocessConfigDto; +} + +// Internal DTO for service layer validation +export class BackupJobPreprocessDto { + @IsEnum(PreprocessType) + preprocessType!: PreprocessType; + + @IsOptional() + @ValidateNested() + @Type(() => PreprocessConfigDto) + 
preprocessConfig?: PreprocessConfigDto; + + @IsOptional() + @IsNumber() + @IsPositive() + @Max(3600) + preprocessTimeout?: number = 300; + + @IsOptional() + @IsBoolean() + cleanupOnFailure?: boolean = true; +} + +**Extended BackupJobConfigData Interface** (internal): +```typescript +interface BackupJobConfigData { + // ... existing fields + preprocessType?: 'none' | 'zfs' | 'flash' | 'script'; + preprocessConfig?: { + zfs?: { + poolName: string; + snapshotName?: string; + streamDirect?: boolean; + compressionLevel?: number; + }; + flash?: { + gitPath?: string; + compressionLevel?: number; + streamDirect?: boolean; + localCachePath?: string; + commitMessage?: string; + includeGitHistory?: boolean; + }; + script?: { + scriptPath: string; + scriptArgs?: string[]; + workingDirectory?: string; + timeout?: number; + }; + }; + preprocessTimeout?: number; + cleanupOnFailure?: boolean; +} +``` + +**GraphQL Schema Extensions** (only expose what UI needs): +```typescript +import { InputType, Field, registerEnumType } from '@nestjs/graphql'; +import { GraphQLJSON } from 'graphql-scalars'; + +registerEnumType(PreprocessType, { + name: 'PreprocessType', + description: 'Type of preprocessing to perform before backup' +}); + +// Extend existing BackupJobConfig ObjectType +@ObjectType({ + implements: () => Node, +}) +export class BackupJobConfig extends Node { + // ... existing fields + + @Field(() => PreprocessType, { nullable: true, defaultValue: PreprocessType.NONE }) + preprocessType?: PreprocessType; + + @Field(() => GraphQLJSON, { nullable: true, description: 'Preprocessing configuration' }) + preprocessConfig?: Record; + + @Field(() => Number, { nullable: true, description: 'Preprocessing timeout in seconds' }) + preprocessTimeout?: number; + + @Field(() => Boolean, { nullable: true, description: 'Cleanup on failure' }) + cleanupOnFailure?: boolean; +} + +// Extend existing input types +@InputType() +export class CreateBackupJobConfigInput { + // ... 
existing fields + + @Field(() => PreprocessType, { nullable: true, defaultValue: PreprocessType.NONE }) + @IsOptional() + @IsEnum(PreprocessType) + preprocessType?: PreprocessType; + + @Field(() => GraphQLJSON, { nullable: true }) + @IsOptional() + @IsObject() + preprocessConfig?: Record; + + @Field(() => Number, { nullable: true, defaultValue: 300 }) + @IsOptional() + @IsNumber() + @IsPositive() + @Max(3600) + preprocessTimeout?: number; + + @Field(() => Boolean, { nullable: true, defaultValue: true }) + @IsOptional() + @IsBoolean() + cleanupOnFailure?: boolean; +} + +@InputType() +export class UpdateBackupJobConfigInput { + // ... existing fields + + @Field(() => PreprocessType, { nullable: true }) + @IsOptional() + @IsEnum(PreprocessType) + preprocessType?: PreprocessType; + + @Field(() => GraphQLJSON, { nullable: true }) + @IsOptional() + @IsObject() + preprocessConfig?: Record; + + @Field(() => Number, { nullable: true }) + @IsOptional() + @IsNumber() + @IsPositive() + @Max(3600) + preprocessTimeout?: number; + + @Field(() => Boolean, { nullable: true }) + @IsOptional() + @IsBoolean() + cleanupOnFailure?: boolean; +} +``` + +**Validation Service for Business Logic**: +```typescript +@Injectable() +export class PreprocessConfigValidationService { + + async validateAndTransform(input: any): Promise { + // Transform to DTO and validate + const dto = plainToClass(BackupJobPreprocessDto, input); + const validationErrors = await validate(dto); + + if (validationErrors.length > 0) { + const errorMessages = validationErrors + .map(error => Object.values(error.constraints || {}).join(', ')) + .join('; '); + throw new BadRequestException(`Validation failed: ${errorMessages}`); + } + + // Custom business logic validation + const businessErrors = this.validateBusinessRules(dto); + if (businessErrors.length > 0) { + throw new BadRequestException(`Configuration errors: ${businessErrors.join('; ')}`); + } + + // Additional async validations + await 
this.validateAsyncRules(dto); + + return dto; + } + + private validateBusinessRules(dto: BackupJobPreprocessDto): string[] { + const errors: string[] = []; + + // Ensure config matches type + if (dto.preprocessType !== PreprocessType.NONE && !dto.preprocessConfig) { + errors.push('Preprocessing configuration is required when preprocessType is not "none"'); + } + + if (dto.preprocessType === PreprocessType.ZFS && !dto.preprocessConfig?.zfs) { + errors.push('ZFS configuration is required when preprocessType is "zfs"'); + } + + if (dto.preprocessType === PreprocessType.FLASH && !dto.preprocessConfig?.flash) { + errors.push('Flash configuration is required when preprocessType is "flash"'); + } + + if (dto.preprocessType === PreprocessType.SCRIPT && !dto.preprocessConfig?.script) { + errors.push('Script configuration is required when preprocessType is "script"'); + } + + // Flash-specific validations + if (dto.preprocessConfig?.flash) { + const flashConfig = dto.preprocessConfig.flash; + + if (flashConfig.localCachePath && flashConfig.streamDirect !== false) { + errors.push('localCachePath can only be used when streamDirect is false'); + } + + if (flashConfig.gitPath && !flashConfig.gitPath.endsWith('/.git')) { + errors.push('Git path should end with "/.git"'); + } + } + + // ZFS-specific validations + if (dto.preprocessConfig?.zfs) { + const zfsConfig = dto.preprocessConfig.zfs; + + if (zfsConfig.poolName.includes('..') || zfsConfig.poolName.startsWith('/')) { + errors.push('Invalid ZFS pool name format'); + } + } + + // Script-specific validations + if (dto.preprocessConfig?.script) { + const scriptConfig = dto.preprocessConfig.script; + + if (!scriptConfig.scriptPath.match(/\.(sh|py|pl|js)$/)) { + errors.push('Script must have a valid extension (.sh, .py, .pl, .js)'); + } + + if (scriptConfig.scriptArgs?.some(arg => arg.includes(';') || arg.includes('|') || arg.includes('&'))) { + errors.push('Script arguments cannot contain shell operators (;, |, &)'); + } + } + + 
return errors; + } + + private async validateAsyncRules(dto: BackupJobPreprocessDto): Promise<void> { + if (dto.preprocessType === PreprocessType.ZFS && dto.preprocessConfig?.zfs) { + const poolExists = await this.validateZfsPool(dto.preprocessConfig.zfs.poolName); + if (!poolExists) { + throw new BadRequestException(`ZFS pool '${dto.preprocessConfig.zfs.poolName}' does not exist`); + } + } + + if (dto.preprocessType === PreprocessType.SCRIPT && dto.preprocessConfig?.script) { + const scriptExists = await this.validateScriptExists(dto.preprocessConfig.script.scriptPath); + if (!scriptExists) { + throw new BadRequestException(`Script '${dto.preprocessConfig.script.scriptPath}' does not exist or is not executable`); + } + } + } + + async validateZfsPool(poolName: string): Promise<boolean> { + // Implementation would check if ZFS pool exists + return true; + } + + async validateScriptExists(scriptPath: string): Promise<boolean> { + // Implementation would check if script file exists and is executable + return true; + } +} +``` + +**Service Integration**: +```typescript +// In BackupConfigService +constructor( + private readonly rcloneService: RCloneService, + private readonly schedulerRegistry: SchedulerRegistry, + private readonly preprocessValidationService: PreprocessConfigValidationService, + private readonly preprocessingService: PreprocessingService +) { + // ... existing constructor logic +} + +async createBackupJobConfig(input: CreateBackupJobConfigInput): Promise<BackupJobConfig> { + // Validate preprocessing config if provided + if (input.preprocessType && input.preprocessType !== PreprocessType.NONE) { + await this.preprocessValidationService.validateAndTransform({ + preprocessType: input.preprocessType, + preprocessConfig: input.preprocessConfig, + preprocessTimeout: input.preprocessTimeout, + cleanupOnFailure: input.cleanupOnFailure + }); + } + + // ...
rest of existing logic +} +``` + +**Key Benefits of This Approach**: +- **Separation of Concerns**: Internal DTOs handle validation, GraphQL schema only exposes what UI needs +- **Type Safety**: Full validation on internal DTOs, simple JSON for GraphQL flexibility +- **Minimal GraphQL Changes**: Only add essential fields to existing schema +- **Backward Compatibility**: Existing backup jobs continue to work (preprocessType defaults to 'none') +- **Flexible Configuration**: UI can send any valid JSON, validated internally by DTOs +- **Future-Proof**: Easy to add new preprocessing types without GraphQL schema changes + +### Architecture Overview + +**Preprocessing Service**: New service to handle different preprocessing types +**Streaming Integration**: Direct integration with rclone daemon for streaming operations +**Job Execution Flow**: Modified to include preprocessing step with streaming support +**Cleanup Management**: Automatic cleanup of temporary resources + +### API Specifications + +**New Preprocessing Service Methods**: +- `executePreprocessing(config, jobId): Promise` +- `executeStreamingPreprocessing(config, jobId): Promise` +- `cleanupPreprocessing(config, jobId): Promise` +- `validatePreprocessConfig(config): ValidationResult` + +**PreprocessResult Interface**: +```typescript +interface PreprocessResult { + success: boolean; + outputPath?: string; // Path to the final backup destination + localCachePath?: string; // Path to local cache file (if used) + streaming: boolean; // Whether the operation used streaming + message: string; // Human-readable status message + metadata?: { + gitCommitHash?: string; // For flash backups + snapshotName?: string; // For ZFS backups + scriptExitCode?: number; // For custom scripts + bytesProcessed?: number; + processingTimeMs?: number; + }; +} +``` + +## Development Details + +### Implementation Approach + +**Phase 1**: Core preprocessing infrastructure +- Add preprocessing fields to data models +- Create base 
preprocessing service +- Implement 'none' type (current behavior) + +**Phase 2**: RCloneApiService streaming extensions +- Add `startStreamingBackup()` method to handle rcat subprocess operations +- Implement streaming job tracking that integrates with existing job system +- Create streaming job status monitoring (bridge subprocess with daemon job tracking) +- Add streaming job cancellation capabilities (process management + cleanup) +- Extend job grouping to include streaming operations under `JOB_GROUP_PREFIX` + +**Phase 3**: Streaming job management integration +- Modify `getAllJobsWithStats()` to include streaming jobs alongside daemon jobs +- Update `getEnhancedJobStatus()` to handle both daemon and streaming job types +- Implement streaming job progress monitoring (file size, transfer rate estimation) +- Add streaming job error handling and retry logic +- Ensure streaming jobs appear in backup job lists with proper status + +**Phase 4**: Flash backup integration (Priority Feature) +- Local git repository setup and configuration +- Git filters and exclusions for proper file handling +- Local commit operations for configuration tracking +- Git repository streaming compression using `tar cf - /boot/.git | rclone rcat remote:backup.tar` +- Direct streaming to destination via rclone daemon +- No temporary local storage required +- Simplified approach without remote git operations or Unraid Connect dependency + +**Phase 5**: ZFS snapshot support +- ZFS snapshot creation/deletion +- Streaming via `zfs send | rclone rcat remote:backup` +- Error handling for ZFS operations +- Cleanup of temporary snapshots + +**Phase 6**: Custom script support +- Script execution in sandboxed environment +- File-based output (non-streaming for security) +- Parameter passing and environment setup +- Security restrictions and validation + +### Streaming Implementation Details + +**ZFS Streaming with RClone Daemon API**: +```typescript +// Use RCloneApiService.startBackup() with 
streaming source +const zfsCommand = `zfs send pool/dataset@backup-timestamp`; +const destinationPath = `${config.remoteName}:${config.destinationPath}/zfs-backup-timestamp`; + +// Stream ZFS data directly to rclone daemon via API +await this.executeStreamingBackup(zfsCommand, destinationPath, config); +``` + +**Flash Backup Streaming with Complete Git Setup**: +```typescript +// Simplified flash backup preprocessing without remote git operations +async executeFlashBackupPreprocessing(config: FlashBackupConfig, jobId: string): Promise { + try { + // 1. Initialize/configure local git repository (always done) + await this.setupLocalGitRepository(); + + // 2. Configure git filters and exclusions + await this.configureGitFilters(); + + // 3. Perform local git operations (add, commit locally only) + await this.performLocalGitOperations(config.commitMessage || 'Backup via comprehensive backup system'); + + // 4. Create backup - either streaming or local cache + if (config.streamDirect !== false) { + // Stream git repository directly to destination + const tarCommand = `tar cf - -C /boot .git`; + const destinationPath = `${config.remoteName}:${config.destinationPath}/flash-backup-${Date.now()}.tar`; + + await this.executeStreamingBackup(tarCommand, destinationPath, config); + + return { + success: true, + outputPath: destinationPath, + streaming: true, + message: 'Flash backup streamed successfully to destination' + }; + } else { + // Create local backup file first, then upload via rclone + const localCachePath = config.localCachePath || `/tmp/flash-backup-${Date.now()}.tar`; + const destinationPath = `${config.remoteName}:${config.destinationPath}/flash-backup-${Date.now()}.tar`; + + // Create local tar file + await this.executeCommand(`tar cf "${localCachePath}" -C /boot .git`); + + // Upload via standard rclone + await this.executeStandardBackup(localCachePath, destinationPath, config); + + // Cleanup local cache if it was auto-generated + if (!config.localCachePath) { 
+ await this.deleteFile(localCachePath); + } + + return { + success: true, + outputPath: destinationPath, + streaming: false, + localCachePath: config.localCachePath ? localCachePath : undefined, + message: 'Flash backup completed successfully via local cache' + }; + } + + } catch (error) { + this.logger.error(`Flash backup preprocessing failed: ${error.message}`); + throw new Error(`Flash backup failed: ${error.message}`); + } +} + +private async setupLocalGitRepository(): Promise { + // Initialize git repository if needed + if (!await this.fileExists('/boot/.git/info/exclude')) { + await this.executeCommand('git init /boot'); + } + + // Setup git description + const varConfig = await this.readConfigFile('/var/local/emhttp/var.ini'); + const serverName = varConfig?.NAME || 'Unknown Server'; + const gitDescText = `Unraid flash drive for ${serverName}\n`; + const gitDescPath = '/boot/.git/description'; + + if (!await this.fileExists(gitDescPath) || await this.readFile(gitDescPath) !== gitDescText) { + await this.writeFile(gitDescPath, gitDescText); + } + + // Configure git user + await this.setGitConfig('user.email', 'gitbot@unraid.net'); + await this.setGitConfig('user.name', 'gitbot'); +} + +private async performLocalGitOperations(commitMessage: string): Promise { + // Check status + const { stdout: statusOutput } = await this.executeCommand('git -C /boot status --porcelain'); + + let needsCommit = false; + if (statusOutput.trim().length > 0) { + needsCommit = true; + } else { + // Check for uncommitted changes + const { stdout: diffOutput } = await this.executeCommand('git -C /boot diff --cached --name-only', { allowFailure: true }); + if (diffOutput.trim().length > 0) { + needsCommit = true; + } + } + + if (needsCommit) { + // Remove invalid files from repo + const { stdout: invalidFiles } = await this.executeCommand('git -C /boot ls-files --cached --ignored --exclude-standard', { allowFailure: true }); + if (invalidFiles.trim()) { + for (const file of 
invalidFiles.trim().split('\n')) { + if (file.trim()) { + await this.executeCommand(`git -C /boot rm --cached --ignore-unmatch '${file.trim()}'`); + } + } + } + + // Add and commit changes locally only + await this.executeCommand('git -C /boot add -A'); + await this.executeCommand(`git -C /boot commit -m "${commitMessage}"`); + + this.logger.log('Local git commit completed for flash backup'); + } else { + this.logger.log('No changes detected, skipping git commit'); + } +} + +### Streaming Implementation Details + +**ZFS Streaming with RClone Daemon API**: +```typescript +// Use RCloneApiService.startBackup() with streaming source +const zfsCommand = `zfs send pool/dataset@backup-timestamp`; +const destinationPath = `${config.remoteName}:${config.destinationPath}/zfs-backup-timestamp`; + +// Stream ZFS data directly to rclone daemon via API +await this.executeStreamingBackup(zfsCommand, destinationPath, config); +``` + +**Flash Backup Streaming with RClone Daemon API**: +```typescript +// Stream git archive directly to rclone daemon +const tarCommand = `tar cf - /boot/.git`; +const destinationPath = `${config.remoteName}:${config.destinationPath}/flash-backup-timestamp.tar`; + +await this.executeStreamingBackup(tarCommand, destinationPath, config); +``` + +**Docker Volume Streaming with RClone Daemon API**: +```typescript +// Stop container, stream volume data, restart container +await this.dockerService.stopContainer(config.containerName); +const dockerCommand = `docker run --rm -v ${config.volumeName}:/data alpine tar cf - /data`; +const destinationPath = `${config.remoteName}:${config.destinationPath}/docker-backup-timestamp.tar`; + +await this.executeStreamingBackup(dockerCommand, destinationPath, config); +await this.dockerService.startContainer(config.containerName); +``` + +**Implementation Notes**: +- **Hybrid Approach**: Use direct `rclone rcat` calls for streaming operations, daemon API for everything else +- **Streaming Method**: Direct `rclone rcat` 
subprocess with piped input from preprocessing commands +- **Job Management**: Leverage existing RCloneApiService for configuration, monitoring, and job tracking +- **Compression Handling**: User configures compress remote in UI, we just use their chosen remote +- **Error Handling**: Combine subprocess error handling with existing RCloneApiService retry logic +- **Process Management**: Proper cleanup of streaming subprocesses and monitoring integration + +**API Integration Points**: +- `RCloneApiService.getRemoteConfig()` for validating user's remote configuration +- `RCloneApiService.getEnhancedJobStatus()` for monitoring progress (if possible to correlate) +- `RCloneApiService.stopJob()` for cancellation (may need custom process management) +- Existing job grouping with `JOB_GROUP_PREFIX` for backup jobs +- Custom subprocess management for streaming operations + +### Subprocess Lifecycle Management + +**Process Tracking**: +```typescript +interface StreamingJobProcess { + jobId: string; + configId: string; + subprocess: ChildProcess; + startTime: Date; + command: string; + destinationPath: string; + status: 'starting' | 'running' | 'completed' | 'failed' | 'cancelled'; + bytesTransferred?: number; + error?: string; +} + +class StreamingJobManager { + private activeProcesses = new Map(); + private readonly logger = new Logger(StreamingJobManager.name); + + async startStreamingJob(command: string, destination: string, configId: string): Promise { + const jobId = `stream-${uuidv4()}`; + const subprocess = spawn('sh', ['-c', `${command} | rclone rcat ${destination}`]); + + const processInfo: StreamingJobProcess = { + jobId, + configId, + subprocess, + startTime: new Date(), + command, + destinationPath: destination, + status: 'starting' + }; + + this.activeProcesses.set(jobId, processInfo); + this.setupProcessHandlers(processInfo); + return jobId; + } + + private setupProcessHandlers(processInfo: StreamingJobProcess): void { + const { subprocess, jobId } = 
processInfo; + + subprocess.on('spawn', () => { + processInfo.status = 'running'; + this.logger.log(`Streaming job ${jobId} started successfully`); + }); + + subprocess.on('exit', (code, signal) => { + if (signal === 'SIGTERM' || signal === 'SIGKILL') { + processInfo.status = 'cancelled'; + } else if (code === 0) { + processInfo.status = 'completed'; + } else { + processInfo.status = 'failed'; + processInfo.error = `Process exited with code ${code}`; + } + + this.logger.log(`Streaming job ${jobId} finished with status: ${processInfo.status}`); + // Keep process info for status queries, cleanup after timeout + setTimeout(() => this.activeProcesses.delete(jobId), 300000); // 5 minutes + }); + + subprocess.on('error', (error) => { + processInfo.status = 'failed'; + processInfo.error = error.message; + this.logger.error(`Streaming job ${jobId} failed:`, error); + }); + } + + async stopStreamingJob(jobId: string): Promise { + const processInfo = this.activeProcesses.get(jobId); + if (!processInfo || processInfo.status === 'completed' || processInfo.status === 'failed') { + return false; + } + + processInfo.status = 'cancelled'; + processInfo.subprocess.kill('SIGTERM'); + + // Force kill after 10 seconds if still running + setTimeout(() => { + if (!processInfo.subprocess.killed) { + processInfo.subprocess.kill('SIGKILL'); + } + }, 10000); + + return true; + } +} +``` + +**Service Shutdown Cleanup**: +```typescript +async onModuleDestroy(): Promise { + this.logger.log('Cleaning up streaming processes...'); + + const activeJobs = Array.from(this.activeProcesses.values()) + .filter(p => p.status === 'running' || p.status === 'starting'); + + if (activeJobs.length > 0) { + this.logger.log(`Terminating ${activeJobs.length} active streaming jobs`); + + // Graceful termination + activeJobs.forEach(job => job.subprocess.kill('SIGTERM')); + + // Wait up to 5 seconds for graceful shutdown + await new Promise(resolve => setTimeout(resolve, 5000)); + + // Force kill any remaining 
processes + activeJobs.forEach(job => { + if (!job.subprocess.killed) { + job.subprocess.kill('SIGKILL'); + } + }); + } +} +``` + +### Job Status Correlation + +**Unified Job Status System**: +```typescript +interface UnifiedJobStatus { + id: string; + type: 'daemon' | 'streaming'; + configId?: string; + status: 'running' | 'completed' | 'failed' | 'cancelled'; + progress?: { + bytesTransferred: number; + totalBytes?: number; + transferRate: number; + eta?: number; + }; + startTime: Date; + endTime?: Date; + error?: string; +} + +async getAllJobsWithStats(): Promise { + // Get existing daemon jobs + const daemonJobs = await this.getExistingDaemonJobs(); + + // Get streaming jobs and convert to RCloneJob format + const streamingJobs = Array.from(this.streamingManager.activeProcesses.values()) + .filter(p => p.status === 'running' || p.status === 'starting') + .map(p => this.convertStreamingToRCloneJob(p)); + + return [...daemonJobs, ...streamingJobs]; +} + +private convertStreamingToRCloneJob(processInfo: StreamingJobProcess): RCloneJob { + return { + id: processInfo.jobId, + configId: processInfo.configId, + status: this.mapStreamingStatus(processInfo.status), + group: `${JOB_GROUP_PREFIX}${processInfo.configId}`, + startTime: processInfo.startTime.toISOString(), + stats: { + bytes: processInfo.bytesTransferred || 0, + speed: this.estimateTransferRate(processInfo), + eta: null, // Streaming jobs don't have reliable ETA + transferring: processInfo.status === 'running' ? [processInfo.destinationPath] : [], + checking: [], + errors: processInfo.error ? 
1 : 0, + fatalError: processInfo.status === 'failed', + finished: processInfo.status === 'completed' || processInfo.status === 'failed' + } + }; +} +``` + +**Progress Monitoring for Streaming Jobs**: +```typescript +private estimateTransferRate(processInfo: StreamingJobProcess): number { + if (!processInfo.bytesTransferred || processInfo.status !== 'running') { + return 0; + } + + const elapsedSeconds = (Date.now() - processInfo.startTime.getTime()) / 1000; + return elapsedSeconds > 0 ? processInfo.bytesTransferred / elapsedSeconds : 0; +} + +// Monitor subprocess output to track progress +private setupProgressMonitoring(processInfo: StreamingJobProcess): void { + let lastProgressUpdate = Date.now(); + + processInfo.subprocess.stderr?.on('data', (data) => { + const output = data.toString(); + + // Parse rclone progress output (if available) + const progressMatch = output.match(/Transferred:\s+(\d+(?:\.\d+)?)\s*(\w+)/); + if (progressMatch) { + const [, amount, unit] = progressMatch; + processInfo.bytesTransferred = this.parseBytes(amount, unit); + lastProgressUpdate = Date.now(); + } + }); + + // Fallback: estimate progress based on time for jobs without progress output + const progressEstimator = setInterval(() => { + if (processInfo.status !== 'running') { + clearInterval(progressEstimator); + return; + } + + // If no progress updates for 30 seconds, job might be stalled + if (Date.now() - lastProgressUpdate > 30000) { + this.logger.warn(`No progress updates for streaming job ${processInfo.jobId} for 30 seconds`); + } + }, 10000); +} +``` + +### Error Recovery and Retry Logic + +**Streaming-Specific Error Handling**: +```typescript +async executeStreamingBackup(command: string, destination: string, config: any): Promise { + const maxRetries = 3; + let attempt = 0; + + while (attempt < maxRetries) { + try { + const jobId = await this.streamingManager.startStreamingJob(command, destination, config.id); + await this.waitForStreamingCompletion(jobId); + return; // 
Success + + } catch (error) { + attempt++; + this.logger.warn(`Streaming backup attempt ${attempt} failed:`, error); + + if (attempt >= maxRetries) { + throw new Error(`Streaming backup failed after ${maxRetries} attempts: ${error.message}`); + } + + // Exponential backoff + const delay = Math.min(1000 * Math.pow(2, attempt - 1), 30000); + await new Promise(resolve => setTimeout(resolve, delay)); + } + } +} + +private async waitForStreamingCompletion(jobId: string): Promise<void> { + return new Promise((resolve, reject) => { + const checkStatus = () => { + const processInfo = this.streamingManager.activeProcesses.get(jobId); + + if (!processInfo) { + reject(new Error(`Streaming job ${jobId} not found`)); + return; + } + + switch (processInfo.status) { + case 'completed': + resolve(); + break; + case 'failed': + reject(new Error(processInfo.error || 'Streaming job failed')); + break; + case 'cancelled': + reject(new Error('Streaming job was cancelled')); + break; + default: + // Still running, check again in 1 second + setTimeout(checkStatus, 1000); + } + }; + + checkStatus(); + }); +} + +// Handle partial stream failures +private async handleStreamingFailure(processInfo: StreamingJobProcess): Promise<void> { + this.logger.error(`Streaming job ${processInfo.jobId} failed, attempting cleanup`); + + // Kill subprocess if still running + if (!processInfo.subprocess.killed) { + processInfo.subprocess.kill('SIGTERM'); + } + + // Check if partial data was uploaded and needs cleanup + try { + // Attempt to remove partial upload from destination + await this.cleanupPartialUpload(processInfo.destinationPath); + } catch (cleanupError) { + this.logger.warn(`Failed to cleanup partial upload: ${cleanupError.message}`); + } +} +``` + +### Concurrency Management + +**Resource Limits and Throttling**: +```typescript +interface ConcurrencyConfig { + maxConcurrentStreaming: number; + maxConcurrentPerConfig: number; + maxTotalBandwidth: number; // bytes per second + queueTimeout: number; //
milliseconds +} + +class ConcurrencyManager { + private readonly config: ConcurrencyConfig = { + maxConcurrentStreaming: 3, + maxConcurrentPerConfig: 1, + maxTotalBandwidth: 100 * 1024 * 1024, // 100 MB/s + queueTimeout: 300000 // 5 minutes + }; + + private readonly jobQueue: Array<{ + configId: string; + command: string; + destination: string; + resolve: (jobId: string) => void; + reject: (error: Error) => void; + queuedAt: Date; + }> = []; + + async queueStreamingJob(command: string, destination: string, configId: string): Promise { + // Check immediate availability + if (this.canStartImmediately(configId)) { + return this.streamingManager.startStreamingJob(command, destination, configId); + } + + // Queue the job + return new Promise((resolve, reject) => { + this.jobQueue.push({ + configId, + command, + destination, + resolve, + reject, + queuedAt: new Date() + }); + + // Set timeout for queued job + setTimeout(() => { + const index = this.jobQueue.findIndex(job => job.resolve === resolve); + if (index !== -1) { + this.jobQueue.splice(index, 1); + reject(new Error('Job timed out in queue')); + } + }, this.config.queueTimeout); + + this.processQueue(); + }); + } + + private canStartImmediately(configId: string): boolean { + const activeJobs = Array.from(this.streamingManager.activeProcesses.values()) + .filter(p => p.status === 'running' || p.status === 'starting'); + + // Check global concurrent limit + if (activeJobs.length >= this.config.maxConcurrentStreaming) { + return false; + } + + // Check per-config limit + const configJobs = activeJobs.filter(p => p.configId === configId); + if (configJobs.length >= this.config.maxConcurrentPerConfig) { + return false; + } + + // Check bandwidth usage + const totalBandwidth = activeJobs.reduce((sum, job) => + sum + this.estimateTransferRate(job), 0); + if (totalBandwidth >= this.config.maxTotalBandwidth) { + return false; + } + + return true; + } + + private async processQueue(): Promise { + while (this.jobQueue.length 
> 0) { + const job = this.jobQueue[0]; + + // Remove expired jobs + if (Date.now() - job.queuedAt.getTime() > this.config.queueTimeout) { + this.jobQueue.shift(); + job.reject(new Error('Job expired in queue')); + continue; + } + + if (this.canStartImmediately(job.configId)) { + this.jobQueue.shift(); + try { + const jobId = await this.streamingManager.startStreamingJob( + job.command, + job.destination, + job.configId + ); + job.resolve(jobId); + } catch (error) { + job.reject(error); + } + } else { + break; // Can't start any more jobs right now + } + } + } + + // Called when streaming jobs complete to process queue + onStreamingJobComplete(): void { + this.processQueue(); + } +} +``` + +**Integration with Existing Job Grouping**: +```typescript +// Extend existing job grouping to include streaming operations +async stopJob(jobId: string): Promise { + // Check if this is a streaming job + if (jobId.startsWith('stream-')) { + const success = await this.streamingManager.stopStreamingJob(jobId); + return { + stopped: success ? [jobId] : [], + errors: success ? 
[] : [`Failed to stop streaming job ${jobId}`] + }; + } + + // Handle daemon jobs and groups as before + if (jobId.startsWith(JOB_GROUP_PREFIX)) { + // Stop all jobs in the group (both daemon and streaming) + const groupJobs = await this.getJobsInGroup(jobId); + const results = await Promise.allSettled( + groupJobs.map(job => this.stopJob(job.id)) + ); + + return this.aggregateStopResults(results); + } + + // Regular daemon job + return this.executeJobOperation([jobId], 'stop'); +} +``` + +## Testing Approach + +### Test Cases + +**Unit Tests**: +- Preprocessing service methods +- Configuration validation +- Error handling scenarios +- Streaming pipeline validation + +**Integration Tests**: +- End-to-end backup workflows with preprocessing +- ZFS snapshot streaming operations +- Docker container management with streaming +- Flash backup streaming compression +- Rclone daemon integration + +**Edge Cases**: +- Network failures during streaming +- ZFS snapshot creation failures +- Docker container stop/start failures +- Permission issues with ZFS/Docker operations +- Malformed custom scripts +- Streaming interruption and recovery + +### Acceptance Criteria + +1. User can select preprocessing type in backup job configuration +2. ZFS snapshot backups stream directly to destination without local storage +3. Flash backup streams compressed archive directly to destination +4. Docker containers are safely stopped/started with volume data streamed +5. Custom scripts execute with proper error handling (file-based output) +6. All streaming operations respect timeout settings +7. Failed preprocessing operations clean up properly (including snapshots) +8. Job status accurately reflects preprocessing progress +9. 
Streaming operations show real-time progress + +## Future Considerations + +### Scalability Plans +- Support for multiple preprocessing steps per job +- Parallel preprocessing for multiple backup sources +- Preprocessing step templates and sharing +- Advanced streaming compression algorithms + +### Enhancement Ideas +- Database dump preprocessing with streaming (MySQL/PostgreSQL) +- VM snapshot integration with streaming +- Network share mounting/unmounting +- Encryption preprocessing steps +- Multi-stream parallel processing + +### Known Limitations +- Custom scripts limited to file-based operations (no streaming for security) +- ZFS operations require appropriate system permissions +- Docker operations require Docker daemon access +- Streaming operations require sufficient network bandwidth for real-time processing +- Streaming failures may require full restart (no partial resume capability) + +## Migration from UpdateFlashBackup.php + +### Replacement Strategy + +**Local Git Repository Management**: +- Local git repository initialization and configuration +- Git filters and exclusions setup for proper file handling +- Local commit operations to track configuration changes +- Streaming backup of git repository without remote synchronization +- No Unraid Connect authentication or remote git push operations + +**Simplified Approach**: +- Focus on local git repository preparation and streaming +- Remove dependency on Unraid Connect for backup operations +- Maintain git history locally for configuration tracking +- Stream entire git repository to backup destination +- Preserve existing UpdateFlashBackup.php for users who need remote sync + +**Enhanced Features**: +- Integration with comprehensive backup job system +- Unified monitoring and status reporting +- Streaming capabilities for faster, more efficient backups +- Better error handling and retry logic +- Consistent logging and debugging across all backup types + +**Migration Steps**: +1. 
Implement local git preprocessing in backup system +2. Add UI option to use new local flash backup method +3. Test new system alongside existing UpdateFlashBackup.php +4. Allow users to choose between local backup and remote sync +5. Maintain both options for different use cases + +**Configuration Mapping**: +```typescript +// Legacy UpdateFlashBackup.php (for remote sync) +const legacyFlashBackup = { + command: 'update', + commitmsg: 'Config change' +}; + +// New local preprocessing configuration +const newLocalFlashBackup: BackupJobConfigData = { + preprocessType: 'flash', + preprocessConfig: { + flash: { + gitPath: '/boot/.git', + streamDirect: true, + commitMessage: 'Config change', + includeGitHistory: true + } + }, + // ... other backup job config +}; + +// Alternative with local cache +const newLocalFlashBackupWithCache: BackupJobConfigData = { + preprocessType: 'flash', + preprocessConfig: { + flash: { + gitPath: '/boot/.git', + streamDirect: false, + localCachePath: '/mnt/cache/flash-backup.tar', + commitMessage: 'Config change', + includeGitHistory: true + } + }, + // ... 
other backup job config +}; +``` + +**Benefits of Local Approach**: +- No dependency on Unraid Connect for backup operations +- Faster backup process without remote authentication +- Unified backup system for all backup types +- Streaming capabilities reduce local storage requirements (when streamDirect=true) +- Local cache option for scenarios requiring intermediate storage +- Better integration with existing backup monitoring +- Consistent error handling and retry logic +``` + +**Validation Usage in Services**: +```typescript +import { Injectable, BadRequestException } from '@nestjs/common'; +import { validate } from 'class-validator'; +import { plainToClass } from 'class-transformer'; + +@Injectable() +export class BackupConfigService { + constructor( + private readonly validationService: PreprocessConfigValidationService + ) {} + + async validateAndCreateBackupJob(input: any): Promise { + // Transform and validate DTO + const dto = plainToClass(BackupJobPreprocessDto, input); + const validationErrors = await validate(dto); + + if (validationErrors.length > 0) { + const errorMessages = validationErrors + .map(error => Object.values(error.constraints || {}).join(', ')) + .join('; '); + throw new BadRequestException(`Validation failed: ${errorMessages}`); + } + + // Custom business logic validation + const businessErrors = this.validationService.validateConfig(dto); + if (businessErrors.length > 0) { + throw new BadRequestException(`Configuration errors: ${businessErrors.join('; ')}`); + } + + // Additional async validations + const poolExists = await this.validationService.validateZfsPool(dto.preprocessConfig?.zfs?.poolName || ''); + if (!poolExists) { + throw new BadRequestException(`ZFS pool '${dto.preprocessConfig?.zfs?.poolName}' does not exist`); + } + + if (dto.preprocessType === PreprocessType.SCRIPT && dto.preprocessConfig?.script) { + const scriptExists = await this.validationService.validateScriptExists(dto.preprocessConfig.script.scriptPath); + if 
(!scriptExists) { + throw new BadRequestException(`Script '${dto.preprocessConfig.script.scriptPath}' does not exist or is not executable`); + } + } + + // Convert DTO to domain model + return this.convertDtoToModel(dto); + } + + private convertDtoToModel(dto: BackupJobPreprocessDto): BackupJobConfig { + // Implementation to convert validated DTO to internal model + return { + preprocessType: dto.preprocessType, + preprocessConfig: dto.preprocessConfig, + preprocessTimeout: dto.preprocessTimeout, + cleanupOnFailure: dto.cleanupOnFailure + } as BackupJobConfig; + } +} + +// GraphQL Resolver with validation +@Resolver() +export class BackupJobResolver { + constructor( + private readonly backupConfigService: BackupConfigService + ) {} + + @Mutation(() => BackupJobConfig) + async createBackupJob( + @Args('input') input: BackupJobPreprocessInput + ): Promise { + return this.backupConfigService.validateAndCreateBackupJob(input); + } + + @Mutation(() => BackupJobConfig) + async updateBackupJob( + @Args('id') id: string, + @Args('input') input: Partial + ): Promise { + // Merge with existing config and validate + const existingConfig = await this.getExistingConfig(id); + const mergedInput = { ...existingConfig, ...input }; + return this.backupConfigService.validateAndCreateBackupJob(mergedInput); + } +} + +// Validation pipe for automatic DTO validation +import { ValidationPipe } from '@nestjs/common'; + +// In main.ts or module configuration +app.useGlobalPipes(new ValidationPipe({ + transform: true, + whitelist: true, + forbidNonWhitelisted: true, + validateCustomDecorators: true +})); +``` + +
\ No newline at end of file diff --git a/.bivvy/m9X4-moves.json b/.bivvy/m9X4-moves.json new file mode 100644 index 000000000..d52b85ee8 --- /dev/null +++ b/.bivvy/m9X4-moves.json @@ -0,0 +1,180 @@ +{ + "climb": "m9X4", + "moves": [ + { + "status": "complete", + "description": "Create preprocessing types and validation DTOs", + "details": "Create the core preprocessing types, enums, and validation DTOs as specified in the climb document. This includes PreprocessType enum, validation classes for ZFS, Flash, and Script configurations, and the main PreprocessConfigDto classes.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.ts" + ] + }, + { + "status": "complete", + "description": "Extend backup job data models with preprocessing fields", + "details": "Add preprocessing fields to the BackupJobConfig GraphQL model and input types. Include preprocessType, preprocessConfig, preprocessTimeout, and cleanupOnFailure fields with proper GraphQL decorators and validation.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/backup.model.ts" + ] + }, + { + "status": "complete", + "description": "Update BackupJobConfigData interface with preprocessing fields", + "details": "Extend the BackupJobConfigData interface to include the new preprocessing fields and update the mapToGraphQL method to handle the new fields.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts" + ] + }, + { + "status": "complete", + "description": "Create preprocessing validation service", + "details": "Implement the PreprocessConfigValidationService with business logic validation, async validation for ZFS pools and scripts, and transformation methods as detailed in the climb document.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing-validation.service.ts" + ], + "rest": true + }, + { + "status": "complete", + "description": "Create streaming job manager", + "details": "Implement the 
StreamingJobManager class to handle subprocess lifecycle management, process tracking, progress monitoring, and cleanup for streaming operations like ZFS and Flash backups.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.ts" + ] + }, + { + "status": "complete", + "description": "Create core preprocessing service", + "details": "Implement the main PreprocessingService with methods for executing different preprocessing types, handling streaming operations, and managing cleanup. Include the PreprocessResult interface and core execution logic.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.service.ts" + ] + }, + { + "status": "complete", + "description": "Extend RClone API service with streaming capabilities", + "details": "Add streaming backup methods to RCloneApiService including startStreamingBackup, streaming job tracking integration, and unified job status management for both daemon and streaming jobs.", + "files": [ + "api/src/unraid-api/graph/resolvers/rclone/rclone-api.service.ts" + ], + "rest": true + }, + { + "status": "complete", + "description": "Create ZFS preprocessing implementation", + "details": "Implement ZFS-specific preprocessing including snapshot creation, streaming via `zfs send | rclone rcat`, snapshot cleanup, and error handling for ZFS operations.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/preprocessing/zfs-preprocessing.service.ts" + ] + }, + { + "status": "complete", + "description": "Create Flash backup preprocessing implementation", + "details": "Implement Flash backup preprocessing with local git repository setup, git operations, and streaming via `tar cf - /boot/.git | rclone rcat` as detailed in the climb document.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-preprocessing.service.ts" + ] + }, + { + "status": "complete", + "description": "Create custom script preprocessing implementation", 
+ "details": "Implement custom script preprocessing with sandboxed execution, parameter passing, timeout handling, and file-based output (non-streaming for security).", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/preprocessing/script-preprocessing.service.ts" + ] + }, + { + "status": "complete", + "description": "Update backup config service with preprocessing integration", + "details": "Integrate preprocessing validation and execution into the backup config service. Update createBackupJobConfig, updateBackupJobConfig, and executeBackupJob methods to handle preprocessing.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts" + ] + }, + { + "status": "complete", + "description": "Update backup module with new services", + "details": "Add all new preprocessing services to the BackupModule providers array and ensure proper dependency injection setup.", + "files": [ + "api/src/unraid-api/graph/resolvers/backup/backup.module.ts" + ], + "rest": true + }, + { + "status": "complete", + "description": "Update web GraphQL queries and fragments", + "details": "Add preprocessing fields to the BACKUP_JOB_CONFIG_FRAGMENT and update mutations to include the new preprocessing configuration fields.", + "files": [ + "web/components/Backup/backup-jobs.query.ts" + ] + }, + { + "status": "todo", + "description": "Create preprocessing UI components", + "details": "Create Vue component for preprocessing configuration with dropdown for preprocessing type selection and dynamic form fields for each preprocessing type (ZFS, Flash, Script).", + "files": [ + "web/components/Backup/PreprocessingConfig.vue" + ] + }, + { + "status": "todo", + "description": "Update backup job form component", + "details": "Integrate the PreprocessingConfig component into the backup job form and handle preprocessing configuration state management.", + "files": [ + "web/components/Backup/BackupJobForm.vue" + ] + }, + { + "status": "todo", + "description": "Update backup 
job list component", + "details": "Add preprocessing status indicators to the backup job list and show preprocessing type and status information.", + "files": [ + "web/components/Backup/BackupJobList.vue" + ] + }, + { + "status": "todo", + "description": "Create preprocessing status monitoring", + "details": "Create component to display preprocessing progress, streaming status, and error messages with real-time updates.", + "files": [ + "web/components/Backup/PreprocessingStatus.vue" + ], + "rest": true + }, + { + "status": "skip", + "description": "Add preprocessing tests", + "details": "Create comprehensive unit tests for all preprocessing services including validation, execution, streaming operations, and error handling scenarios.", + "files": [ + "api/src/__test__/preprocessing/preprocessing.service.spec.ts", + "api/src/__test__/preprocessing/zfs-preprocessing.service.spec.ts", + "api/src/__test__/preprocessing/flash-preprocessing.service.spec.ts", + "api/src/__test__/preprocessing/streaming-job-manager.spec.ts" + ] + }, + { + "status": "skip", + "description": "Add integration tests", + "details": "Create integration tests for end-to-end backup workflows with preprocessing, including ZFS snapshot streaming, Flash backup streaming, and error recovery scenarios.", + "files": [ + "api/src/__test__/backup/backup-preprocessing-integration.spec.ts" + ] + }, + { + "status": "skip", + "description": "Update documentation", + "details": "Create comprehensive documentation for the preprocessing system including configuration examples, troubleshooting guide, and API reference.", + "files": [ + "api/docs/backup-preprocessing.md" + ] + }, + { + "status": "skip", + "description": "Add preprocessing configuration examples", + "details": "Provide example configurations for each preprocessing type to help users understand the configuration options and best practices.", + "files": [ + "api/docs/examples/preprocessing-configs.json" + ] + } + ] +} \ No newline at end of file diff 
--git a/api/dev/Unraid.net/myservers.cfg b/api/dev/Unraid.net/myservers.cfg index 1f8d0bb03..1c664dad2 100644 --- a/api/dev/Unraid.net/myservers.cfg +++ b/api/dev/Unraid.net/myservers.cfg @@ -1,5 +1,5 @@ [api] -version="4.8.0" +version="4.4.1" extraOrigins="https://google.com,https://test.com" [local] sandbox="yes" diff --git a/api/dev/api/backup/backup-jobs.json b/api/dev/api/backup/backup-jobs.json index 70fbc512d..3564e02b1 100644 --- a/api/dev/api/backup/backup-jobs.json +++ b/api/dev/api/backup/backup-jobs.json @@ -9,9 +9,9 @@ "enabled": false, "rcloneOptions": {}, "createdAt": "2025-05-24T12:19:29.150Z", - "updatedAt": "2025-05-25T01:21:35.110Z", - "lastRunStatus": "Started with job ID: 51", - "currentJobId": 51, - "lastRunAt": "2025-05-25T01:21:35.111Z" + "updatedAt": "2025-05-26T16:14:13.977Z", + "lastRunStatus": "Started with job ID: 34", + "currentJobId": 34, + "lastRunAt": "2025-05-26T16:14:13.977Z" } ] \ No newline at end of file diff --git a/api/dev/configs/connect.json b/api/dev/configs/connect.json index 7ec3d55f8..e69de29bb 100644 --- a/api/dev/configs/connect.json +++ b/api/dev/configs/connect.json @@ -1,3 +0,0 @@ -{ - "demo": "hello.unraider" -} \ No newline at end of file diff --git a/api/generated-schema-new.graphql b/api/generated-schema-new.graphql deleted file mode 100644 index 8daa7f161..000000000 --- a/api/generated-schema-new.graphql +++ /dev/null @@ -1,1563 +0,0 @@ -# ------------------------------------------------------ -# THIS FILE WAS AUTOMATICALLY GENERATED (DO NOT MODIFY) -# ------------------------------------------------------ - -type ApiKeyResponse { - valid: Boolean! - error: String -} - -type MinigraphqlResponse { - status: MinigraphStatus! - timeout: Int - error: String -} - -enum MinigraphStatus { - PRE_INIT - CONNECTING - CONNECTED - PING_FAILURE - ERROR_RETRYING -} - -type CloudResponse { - status: String! - ip: String - error: String -} - -type RelayResponse { - status: String! 
- timeout: String - error: String -} - -type Cloud { - error: String - apiKey: ApiKeyResponse! - relay: RelayResponse - minigraphql: MinigraphqlResponse! - cloud: CloudResponse! - allowedOrigins: [String!]! -} - -type Capacity { - """Free capacity""" - free: String! - - """Used capacity""" - used: String! - - """Total capacity""" - total: String! -} - -type ArrayCapacity { - """Capacity in kilobytes""" - kilobytes: Capacity! - - """Capacity in number of disks""" - disks: Capacity! -} - -type ArrayDisk implements Node { - """Disk identifier, only set for present disks on the system""" - id: ID! - - """ - Array slot number. Parity1 is always 0 and Parity2 is always 29. Array slots will be 1 - 28. Cache slots are 30 - 53. Flash is 54. - """ - idx: Int! - name: String - device: String - - """(KB) Disk Size total""" - size: Long - status: ArrayDiskStatus - - """Is the disk a HDD or SSD.""" - rotational: Boolean - - """Disk temp - will be NaN if array is not started or DISK_NP""" - temp: Int - - """ - Count of I/O read requests sent to the device I/O drivers. These statistics may be cleared at any time. - """ - numReads: Long - - """ - Count of I/O writes requests sent to the device I/O drivers. These statistics may be cleared at any time. - """ - numWrites: Long - - """ - Number of unrecoverable errors reported by the device I/O drivers. Missing data due to unrecoverable array read errors is filled in on-the-fly using parity reconstruct (and we attempt to write this data back to the sector(s) which failed). Any unrecoverable write error results in disabling the disk. - """ - numErrors: Long - - """(KB) Total Size of the FS (Not present on Parity type drive)""" - fsSize: Long - - """(KB) Free Size on the FS (Not present on Parity type drive)""" - fsFree: Long - - """(KB) Used Size on the FS (Not present on Parity type drive)""" - fsUsed: Long - exportable: Boolean - - """Type of Disk - used to differentiate Cache / Flash / Array / Parity""" - type: ArrayDiskType! 
- - """(%) Disk space left to warn""" - warning: Int - - """(%) Disk space left for critical""" - critical: Int - - """File system type for the disk""" - fsType: String - - """User comment on disk""" - comment: String - - """File format (ex MBR: 4KiB-aligned)""" - format: String - - """ata | nvme | usb | (others)""" - transport: String - color: ArrayDiskFsColor -} - -interface Node { - id: ID! -} - -"""The `Long` scalar type represents 52-bit integers""" -scalar Long - -enum ArrayDiskStatus { - DISK_NP - DISK_OK - DISK_NP_MISSING - DISK_INVALID - DISK_WRONG - DISK_DSBL - DISK_NP_DSBL - DISK_DSBL_NEW - DISK_NEW -} - -enum ArrayDiskType { - DATA - PARITY - FLASH - CACHE -} - -enum ArrayDiskFsColor { - GREEN_ON - GREEN_BLINK - BLUE_ON - BLUE_BLINK - YELLOW_ON - YELLOW_BLINK - RED_ON - RED_OFF - GREY_OFF -} - -type UnraidArray implements Node { - id: ID! - - """Array state before this query/mutation""" - previousState: ArrayState - - """Array state after this query/mutation""" - pendingState: ArrayPendingState - - """Current array state""" - state: ArrayState! - - """Current array capacity""" - capacity: ArrayCapacity! - - """Current boot disk""" - boot: ArrayDisk - - """Parity disks in the current array""" - parities: [ArrayDisk!]! - - """Data disks in the current array""" - disks: [ArrayDisk!]! - - """Caches in the current array""" - caches: [ArrayDisk!]! -} - -enum ArrayState { - STARTED - STOPPED - NEW_ARRAY - RECON_DISK - DISABLE_DISK - SWAP_DSBL - INVALID_EXPANSION - PARITY_NOT_BIGGEST - TOO_MANY_MISSING_DISKS - NEW_DISK_TOO_SMALL - NO_DATA_DISKS -} - -enum ArrayPendingState { - STARTING - STOPPING - NO_DATA_DISKS - TOO_MANY_MISSING_DISKS -} - -type Share implements Node { - id: ID! - - """Display name""" - name: String - - """(KB) Free space""" - free: Long - - """(KB) Used Size""" - used: Long - - """(KB) Total size""" - size: Long - - """Disks that are included in this share""" - include: [String!] 
- - """Disks that are excluded from this share""" - exclude: [String!] - - """Is this share cached""" - cache: Boolean - - """Original name""" - nameOrig: String - - """User comment""" - comment: String - - """Allocator""" - allocator: String - - """Split level""" - splitLevel: String - - """Floor""" - floor: String - - """COW""" - cow: String - - """Color""" - color: String - - """LUKS status""" - luksStatus: String -} - -type RemoteAccess { - """The type of WAN access used for Remote Access""" - accessType: WAN_ACCESS_TYPE! - - """The type of port forwarding used for Remote Access""" - forwardType: WAN_FORWARD_TYPE - - """The port used for Remote Access""" - port: Int -} - -enum WAN_ACCESS_TYPE { - DYNAMIC - ALWAYS - DISABLED -} - -enum WAN_FORWARD_TYPE { - UPNP - STATIC -} - -type DynamicRemoteAccessStatus { - """The type of dynamic remote access that is enabled""" - enabledType: DynamicRemoteAccessType! - - """The type of dynamic remote access that is currently running""" - runningType: DynamicRemoteAccessType! - - """Any error message associated with the dynamic remote access""" - error: String -} - -enum DynamicRemoteAccessType { - STATIC - UPNP - DISABLED -} - -type ConnectSettingsValues { - """ - If true, the GraphQL sandbox is enabled and available at /graphql. If false, the GraphQL sandbox is disabled and only the production API will be available. - """ - sandbox: Boolean! - - """A list of origins allowed to interact with the API""" - extraOrigins: [String!]! - - """The type of WAN access used for Remote Access""" - accessType: WAN_ACCESS_TYPE! - - """The type of port forwarding used for Remote Access""" - forwardType: WAN_FORWARD_TYPE - - """The port used for Remote Access""" - port: Int - - """A list of Unique Unraid Account ID's""" - ssoUserIds: [String!]! -} - -type ConnectSettings implements Node { - """The unique identifier for the Connect settings""" - id: ID! - - """The data schema for the Connect settings""" - dataSchema: JSON! 
- - """The UI schema for the Connect settings""" - uiSchema: JSON! - - """The values for the Connect settings""" - values: ConnectSettingsValues! -} - -""" -The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). -""" -scalar JSON @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf") - -type Connect implements Node { - """The unique identifier for the Connect instance""" - id: ID! - - """The status of dynamic remote access""" - dynamicRemoteAccess: DynamicRemoteAccessStatus! - - """The settings for the Connect instance""" - settings: ConnectSettings! -} - -type AccessUrl { - type: URL_TYPE! - name: String - ipv4: URL - ipv6: URL -} - -enum URL_TYPE { - LAN - WIREGUARD - WAN - MDNS - OTHER - DEFAULT -} - -""" -A field whose value conforms to the standard URL format as specified in RFC3986: https://www.ietf.org/rfc/rfc3986.txt. -""" -scalar URL - -type Network implements Node { - id: ID! - accessUrls: [AccessUrl!] -} - -type ProfileModel { - userId: ID - username: String! - url: String! - avatar: String! -} - -type Server { - owner: ProfileModel! - guid: String! - apikey: String! - name: String! - status: ServerStatus! - wanip: String! - lanip: String! - localurl: String! - remoteurl: String! -} - -enum ServerStatus { - ONLINE - OFFLINE - NEVER_CONNECTED -} - -type DiskPartition { - """The name of the partition""" - name: String! - - """The filesystem type of the partition""" - fsType: DiskFsType! - - """The size of the partition in bytes""" - size: Float! -} - -"""The type of filesystem on the disk partition""" -enum DiskFsType { - XFS - BTRFS - VFAT - ZFS - EXT4 - NTFS -} - -type Disk { - """The unique identifier of the disk""" - id: String! - - """The device path of the disk (e.g. /dev/sdb)""" - device: String! - - """The type of disk (e.g. SSD, HDD)""" - type: String! - - """The model name of the disk""" - name: String! 
- - """The manufacturer of the disk""" - vendor: String! - - """The total size of the disk in bytes""" - size: Float! - - """The number of bytes per sector""" - bytesPerSector: Float! - - """The total number of cylinders on the disk""" - totalCylinders: Float! - - """The total number of heads on the disk""" - totalHeads: Float! - - """The total number of sectors on the disk""" - totalSectors: Float! - - """The total number of tracks on the disk""" - totalTracks: Float! - - """The number of tracks per cylinder""" - tracksPerCylinder: Float! - - """The number of sectors per track""" - sectorsPerTrack: Float! - - """The firmware revision of the disk""" - firmwareRevision: String! - - """The serial number of the disk""" - serialNum: String! - - """The interface type of the disk""" - interfaceType: DiskInterfaceType! - - """The SMART status of the disk""" - smartStatus: DiskSmartStatus! - - """The current temperature of the disk in Celsius""" - temperature: Float - - """The partitions on the disk""" - partitions: [DiskPartition!]! -} - -"""The type of interface the disk uses to connect to the system""" -enum DiskInterfaceType { - SAS - SATA - USB - PCIE - UNKNOWN -} - -""" -The SMART (Self-Monitoring, Analysis and Reporting Technology) status of the disk -""" -enum DiskSmartStatus { - OK - UNKNOWN -} - -type KeyFile { - location: String - contents: String -} - -type Registration { - guid: ID - type: registrationType - keyFile: KeyFile - state: RegistrationState - expiration: String - updateExpiration: String -} - -enum registrationType { - BASIC - PLUS - PRO - STARTER - UNLEASHED - LIFETIME - INVALID - TRIAL -} - -enum RegistrationState { - TRIAL - BASIC - PLUS - PRO - STARTER - UNLEASHED - LIFETIME - EEXPIRED - EGUID - EGUID1 - ETRIAL - ENOKEYFILE - ENOKEYFILE1 - ENOKEYFILE2 - ENOFLASH - ENOFLASH1 - ENOFLASH2 - ENOFLASH3 - ENOFLASH4 - ENOFLASH5 - ENOFLASH6 - ENOFLASH7 - EBLACKLISTED - EBLACKLISTED1 - EBLACKLISTED2 - ENOCONN -} - -type Vars implements Node { - id: ID! 
- - """Unraid version""" - version: String - maxArraysz: Int - maxCachesz: Int - - """Machine hostname""" - name: String - timeZone: String - comment: String - security: String - workgroup: String - domain: String - domainShort: String - hideDotFiles: Boolean - localMaster: Boolean - enableFruit: String - - """Should a NTP server be used for time sync?""" - useNtp: Boolean - - """NTP Server 1""" - ntpServer1: String - - """NTP Server 2""" - ntpServer2: String - - """NTP Server 3""" - ntpServer3: String - - """NTP Server 4""" - ntpServer4: String - domainLogin: String - sysModel: String - sysArraySlots: Int - sysCacheSlots: Int - sysFlashSlots: Int - useSsl: Boolean - - """Port for the webui via HTTP""" - port: Int - - """Port for the webui via HTTPS""" - portssl: Int - localTld: String - bindMgt: Boolean - - """Should telnet be enabled?""" - useTelnet: Boolean - porttelnet: Int - useSsh: Boolean - portssh: Int - startPage: String - startArray: Boolean - spindownDelay: String - queueDepth: String - spinupGroups: Boolean - defaultFormat: String - defaultFsType: String - shutdownTimeout: Int - luksKeyfile: String - pollAttributes: String - pollAttributesDefault: String - pollAttributesStatus: String - nrRequests: Int - nrRequestsDefault: Int - nrRequestsStatus: String - mdNumStripes: Int - mdNumStripesDefault: Int - mdNumStripesStatus: String - mdSyncWindow: Int - mdSyncWindowDefault: Int - mdSyncWindowStatus: String - mdSyncThresh: Int - mdSyncThreshDefault: Int - mdSyncThreshStatus: String - mdWriteMethod: Int - mdWriteMethodDefault: String - mdWriteMethodStatus: String - shareDisk: String - shareUser: String - shareUserInclude: String - shareUserExclude: String - shareSmbEnabled: Boolean - shareNfsEnabled: Boolean - shareAfpEnabled: Boolean - shareInitialOwner: String - shareInitialGroup: String - shareCacheEnabled: Boolean - shareCacheFloor: String - shareMoverSchedule: String - shareMoverLogging: Boolean - fuseRemember: String - fuseRememberDefault: String - 
fuseRememberStatus: String - fuseDirectio: String - fuseDirectioDefault: String - fuseDirectioStatus: String - shareAvahiEnabled: Boolean - shareAvahiSmbName: String - shareAvahiSmbModel: String - shareAvahiAfpName: String - shareAvahiAfpModel: String - safeMode: Boolean - startMode: String - configValid: Boolean - configError: ConfigErrorState - joinStatus: String - deviceCount: Int - flashGuid: String - flashProduct: String - flashVendor: String - regCheck: String - regFile: String - regGuid: String - regTy: registrationType - regState: RegistrationState - - """Registration owner""" - regTo: String - regTm: String - regTm2: String - regGen: String - sbName: String - sbVersion: String - sbUpdated: String - sbEvents: Int - sbState: String - sbClean: Boolean - sbSynced: Int - sbSyncErrs: Int - sbSynced2: Int - sbSyncExit: String - sbNumDisks: Int - mdColor: String - mdNumDisks: Int - mdNumDisabled: Int - mdNumInvalid: Int - mdNumMissing: Int - mdNumNew: Int - mdNumErased: Int - mdResync: Int - mdResyncCorr: String - mdResyncPos: String - mdResyncDb: String - mdResyncDt: String - mdResyncAction: String - mdResyncSize: Int - mdState: String - mdVersion: String - cacheNumDevices: Int - cacheSbNumDisks: Int - fsState: String - - """Human friendly string of array events happening""" - fsProgress: String - - """ - Percentage from 0 - 100 while upgrading a disk or swapping parity drives - """ - fsCopyPrcnt: Int - fsNumMounted: Int - fsNumUnmountable: Int - fsUnmountableMask: String - - """Total amount of user shares""" - shareCount: Int - - """Total amount shares with SMB enabled""" - shareSmbCount: Int - - """Total amount shares with NFS enabled""" - shareNfsCount: Int - - """Total amount shares with AFP enabled""" - shareAfpCount: Int - shareMoverActive: Boolean - csrfToken: String -} - -"""Possible error states for configuration""" -enum ConfigErrorState { - UNKNOWN_ERROR - INELIGIBLE - INVALID - NO_KEY_SERVER - WITHDRAWN -} - -type Permission { - resource: Resource! 
- actions: [String!]! -} - -"""Available resources for permissions""" -enum Resource { - API_KEY - ARRAY - CLOUD - CONFIG - CONNECT - CONNECT__REMOTE_ACCESS - CUSTOMIZATIONS - DASHBOARD - DISK - DISPLAY - DOCKER - FLASH - INFO - LOGS - ME - NETWORK - NOTIFICATIONS - ONLINE - OS - OWNER - PERMISSION - REGISTRATION - SERVERS - SERVICES - SHARE - VARS - VMS - WELCOME -} - -type ApiKey { - id: ID! - name: String! - description: String - roles: [Role!]! - createdAt: String! - permissions: [Permission!]! -} - -"""Available roles for API keys and users""" -enum Role { - ADMIN - CONNECT - GUEST -} - -type ApiKeyWithSecret { - id: ID! - name: String! - description: String - roles: [Role!]! - createdAt: String! - permissions: [Permission!]! - key: String! -} - -type ArrayMutations { - """Set array state""" - setState(input: ArrayStateInput!): UnraidArray! - - """Add new disk to array""" - addDiskToArray(input: ArrayDiskInput!): UnraidArray! - - """ - Remove existing disk from array. NOTE: The array must be stopped before running this otherwise it'll throw an error. - """ - removeDiskFromArray(input: ArrayDiskInput!): UnraidArray! - - """Mount a disk in the array""" - mountArrayDisk(id: String!): ArrayDisk! - - """Unmount a disk from the array""" - unmountArrayDisk(id: String!): ArrayDisk! - - """Clear statistics for a disk in the array""" - clearArrayDiskStatistics(id: String!): Boolean! -} - -input ArrayStateInput { - """Array state""" - desiredState: ArrayStateInputState! -} - -enum ArrayStateInputState { - START - STOP -} - -input ArrayDiskInput { - """Disk ID""" - id: ID! - - """The slot for the disk""" - slot: Int -} - -type DockerMutations { - """Start a container""" - start(id: String!): DockerContainer! - - """Stop a container""" - stop(id: String!): DockerContainer! 
-} - -type ParityCheck { - """Date of the parity check""" - date: DateTime - - """Duration of the parity check in seconds""" - duration: Int - - """Speed of the parity check, in MB/s""" - speed: String - - """Status of the parity check""" - status: String - - """Number of errors during the parity check""" - errors: Int - - """Progress percentage of the parity check""" - progress: Int - - """Whether corrections are being written to parity""" - correcting: Boolean - - """Whether the parity check is paused""" - paused: Boolean - - """Whether the parity check is running""" - running: Boolean -} - -""" -A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date-time format. -""" -scalar DateTime - -type Config implements Node { - id: ID! - valid: Boolean - error: String -} - -type InfoApps implements Node { - id: ID! - - """How many docker containers are installed""" - installed: Int! - - """How many docker containers are running""" - started: Int! -} - -type Baseboard implements Node { - id: ID! - manufacturer: String! - model: String - version: String - serial: String - assetTag: String -} - -type InfoCpu implements Node { - id: ID! - manufacturer: String! - brand: String! - vendor: String! - family: String! - model: String! - stepping: Int! - revision: String! - voltage: String - speed: Float! - speedmin: Float! - speedmax: Float! - threads: Int! - cores: Int! - processors: Int! - socket: String! - cache: JSON! - flags: [String!]! -} - -type Gpu implements Node { - id: ID! - type: String! - typeid: String! - vendorname: String! - productid: String! - blacklisted: Boolean! - class: String! -} - -type Pci implements Node { - id: ID! - type: String - typeid: String - vendorname: String - vendorid: String - productname: String - productid: String - blacklisted: String - class: String -} - -type Usb { - id: ID! - name: String -} - -type Devices implements Node { - id: ID! - gpu: [Gpu!]! - pci: [Pci!]! - usb: [Usb!]! 
-} - -type Case implements Node { - id: ID! - icon: String - url: String - error: String - base64: String -} - -type Display implements Node { - id: ID! - case: Case - date: String - number: String - scale: Boolean - tabs: Boolean - users: String - resize: Boolean - wwn: Boolean - total: Boolean - usage: Boolean - banner: String - dashapps: String - theme: Theme - text: Boolean - unit: Temperature - warning: Int - critical: Int - hot: Int - max: Int - locale: String -} - -"""Display theme""" -enum Theme { - white -} - -"""Temperature unit (Celsius or Fahrenheit)""" -enum Temperature { - C - F -} - -type MemoryLayout { - size: Int! - bank: String - type: String - clockSpeed: Int - formFactor: String - manufacturer: String - partNum: String - serialNum: String - voltageConfigured: Int - voltageMin: Int - voltageMax: Int -} - -type InfoMemory implements Node { - id: ID! - max: Int! - total: Int! - free: Int! - used: Int! - active: Int! - available: Int! - buffcache: Int! - swaptotal: Int! - swapused: Int! - swapfree: Int! - layout: [MemoryLayout!]! -} - -type Os implements Node { - id: ID! - platform: String - distro: String - release: String - codename: String - kernel: String - arch: String - hostname: String - codepage: String - logofile: String - serial: String - build: String - uptime: String -} - -type System implements Node { - id: ID! - manufacturer: String - model: String - version: String - serial: String - uuid: String - sku: String -} - -type Versions implements Node { - id: ID! - kernel: String - openssl: String - systemOpenssl: String - systemOpensslLib: String - node: String - v8: String - npm: String - yarn: String - pm2: String - gulp: String - grunt: String - git: String - tsc: String - mysql: String - redis: String - mongodb: String - apache: String - nginx: String - php: String - docker: String - postfix: String - postgresql: String - perl: String - python: String - gcc: String - unraid: String -} - -type Info implements Node { - id: ID! 
- - """Count of docker containers""" - apps: InfoApps! - baseboard: Baseboard! - cpu: InfoCpu! - devices: Devices! - display: Display! - - """Machine ID""" - machineId: ID - memory: InfoMemory! - os: Os! - system: System! - time: DateTime! - versions: Versions! -} - -type ContainerPort { - ip: String - privatePort: Int! - publicPort: Int! - type: ContainerPortType! -} - -enum ContainerPortType { - TCP - UDP -} - -type ContainerHostConfig { - networkMode: String! -} - -type DockerContainer { - id: ID! - names: [String!]! - image: String! - imageId: String! - command: String! - created: Int! - ports: [ContainerPort!]! - - """Total size of all the files in the container""" - sizeRootFs: Int - labels: JSONObject - state: ContainerState! - status: String! - hostConfig: ContainerHostConfig - networkSettings: JSONObject - mounts: [JSONObject!] - autoStart: Boolean! -} - -""" -The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). -""" -scalar JSONObject - -enum ContainerState { - RUNNING - EXITED -} - -type DockerNetwork { - name: String! - id: ID! - created: String! - scope: String! - driver: String! - enableIPv6: Boolean! - ipam: JSONObject! - internal: Boolean! - attachable: Boolean! - ingress: Boolean! - configFrom: JSONObject! - configOnly: Boolean! - containers: JSONObject! - options: JSONObject! - labels: JSONObject! -} - -type Docker implements Node { - id: ID! - containers: [DockerContainer!]! - networks: [DockerNetwork!]! -} - -type Flash implements Node { - id: ID! - guid: String! - vendor: String! - product: String! -} - -type LogFile { - """Name of the log file""" - name: String! - - """Full path to the log file""" - path: String! - - """Size of the log file in bytes""" - size: Int! - - """Last modified timestamp""" - modifiedAt: DateTime! -} - -type LogFileContent { - """Path to the log file""" - path: String! 
- - """Content of the log file""" - content: String! - - """Total number of lines in the file""" - totalLines: Int! - - """Starting line number of the content (1-indexed)""" - startLine: Int -} - -type NotificationCounts { - info: Int! - warning: Int! - alert: Int! - total: Int! -} - -type NotificationOverview { - unread: NotificationCounts! - archive: NotificationCounts! -} - -type Notification { - id: ID! - - """Also known as 'event'""" - title: String! - subject: String! - description: String! - importance: NotificationImportance! - link: String - type: NotificationType! - - """ISO Timestamp for when the notification occurred""" - timestamp: String - formattedTimestamp: String -} - -enum NotificationImportance { - ALERT - INFO - WARNING -} - -enum NotificationType { - UNREAD - ARCHIVE -} - -type Notifications { - id: ID! - - """A cached overview of the notifications in the system & their severity.""" - overview: NotificationOverview! - list(filter: NotificationFilter!): [Notification!]! -} - -input NotificationFilter { - importance: NotificationImportance - type: NotificationType! - offset: Int! - limit: Int! -} - -type Owner { - username: String! - url: String! - avatar: String! -} - -type VmDomain { - uuid: ID! - - """A friendly name for the vm""" - name: String - - """Current domain vm state""" - state: VmState! -} - -"""The state of a virtual machine""" -enum VmState { - NOSTATE - RUNNING - IDLE - PAUSED - SHUTDOWN - SHUTOFF - CRASHED - PMSUSPENDED -} - -type Vms { - id: ID! - domains: [VmDomain!] -} - -type Uptime { - timestamp: String -} - -type Service implements Node { - id: ID! - name: String - online: Boolean - uptime: Uptime - version: String -} - -type UserAccount { - """A unique identifier for the user""" - id: ID! - - """The name of the user""" - name: String! - - """A description of the user""" - description: String! - - """The roles of the user""" - roles: [Role!]! - - """The permissions of the user""" - permissions: [Permission!] 
-} - -type Query { - apiKeys: [ApiKey!]! - apiKey(id: String!): ApiKey - cloud: Cloud! - config: Config! - display: Display! - flash: Flash! - info: Info! - logFiles: [LogFile!]! - logFile(path: String!, lines: Int, startLine: Int): LogFileContent! - me: UserAccount! - network: Network! - - """Get all notifications""" - notifications: Notifications! - online: Boolean! - owner: Owner! - registration: Registration - server: Server - servers: [Server!]! - services: [Service!]! - shares: [Share!]! - vars: Vars! - vms: Vms! - parityHistory: [ParityCheck!]! - array: UnraidArray! - connect: Connect! - remoteAccess: RemoteAccess! - extraAllowedOrigins: [String!]! - docker: Docker! - disks: [Disk!]! - disk(id: String!): Disk! -} - -type Mutation { - createApiKey(input: CreateApiKeyInput!): ApiKeyWithSecret! - addRoleForApiKey(input: AddRoleForApiKeyInput!): Boolean! - removeRoleFromApiKey(input: RemoveRoleFromApiKeyInput!): Boolean! - - """Creates a new notification record""" - createNotification(input: NotificationData!): Notification! - deleteNotification(id: String!, type: NotificationType!): NotificationOverview! - - """Deletes all archived notifications on server.""" - deleteArchivedNotifications: NotificationOverview! - - """Marks a notification as archived.""" - archiveNotification(id: String!): Notification! - archiveNotifications(ids: [String!]!): NotificationOverview! - archiveAll(importance: NotificationImportance): NotificationOverview! - - """Marks a notification as unread.""" - unreadNotification(id: String!): Notification! - unarchiveNotifications(ids: [String!]!): NotificationOverview! - unarchiveAll(importance: NotificationImportance): NotificationOverview! - - """Reads each notification to recompute & update the overview.""" - recalculateOverview: NotificationOverview! - array: ArrayMutations! - docker: DockerMutations! - - """Start a virtual machine""" - startVm(id: String!): Boolean! - - """Stop a virtual machine""" - stopVm(id: String!): Boolean! 
- - """Pause a virtual machine""" - pauseVm(id: String!): Boolean! - - """Resume a virtual machine""" - resumeVm(id: String!): Boolean! - - """Force stop a virtual machine""" - forceStopVm(id: String!): Boolean! - - """Reboot a virtual machine""" - rebootVm(id: String!): Boolean! - - """Reset a virtual machine""" - resetVm(id: String!): Boolean! - startParityCheck(correct: Boolean!): JSON! - pauseParityCheck: JSON! - resumeParityCheck: JSON! - cancelParityCheck: JSON! - updateApiSettings(input: ApiSettingsInput!): ConnectSettingsValues! - connectSignIn(input: ConnectSignInInput!): Boolean! - connectSignOut: Boolean! - setupRemoteAccess(input: SetupRemoteAccessInput!): Boolean! - setAdditionalAllowedOrigins(input: AllowedOriginInput!): [String!]! - enableDynamicRemoteAccess(input: EnableDynamicRemoteAccessInput!): Boolean! -} - -input CreateApiKeyInput { - name: String! - description: String - roles: [Role!] - permissions: [AddPermissionInput!] - - """ - This will replace the existing key if one already exists with the same name, otherwise returns the existing key - """ - overwrite: Boolean -} - -input AddPermissionInput { - resource: Resource! - actions: [String!]! -} - -input AddRoleForApiKeyInput { - apiKeyId: ID! - role: Role! -} - -input RemoveRoleFromApiKeyInput { - apiKeyId: ID! - role: Role! -} - -input NotificationData { - title: String! - subject: String! - description: String! - importance: NotificationImportance! - link: String -} - -input ApiSettingsInput { - """ - If true, the GraphQL sandbox will be enabled and available at /graphql. If false, the GraphQL sandbox will be disabled and only the production API will be available. - """ - sandbox: Boolean - - """A list of origins allowed to interact with the API""" - extraOrigins: [String!] 
- - """The type of WAN access to use for Remote Access""" - accessType: WAN_ACCESS_TYPE - - """The type of port forwarding to use for Remote Access""" - forwardType: WAN_FORWARD_TYPE - - """ - The port to use for Remote Access. Not required for UPNP forwardType. Required for STATIC forwardType. Ignored if accessType is DISABLED or forwardType is UPNP. - """ - port: Int - - """A list of Unique Unraid Account ID's""" - ssoUserIds: [String!] -} - -input ConnectSignInInput { - """The API key for authentication""" - apiKey: String! - - """The ID token for authentication""" - idToken: String - - """User information for the sign-in""" - userInfo: ConnectUserInfoInput - - """The access token for authentication""" - accessToken: String - - """The refresh token for authentication""" - refreshToken: String -} - -input ConnectUserInfoInput { - """The preferred username of the user""" - preferred_username: String! - - """The email address of the user""" - email: String! - - """The avatar URL of the user""" - avatar: String -} - -input SetupRemoteAccessInput { - """The type of WAN access to use for Remote Access""" - accessType: WAN_ACCESS_TYPE! - - """The type of port forwarding to use for Remote Access""" - forwardType: WAN_FORWARD_TYPE - - """ - The port to use for Remote Access. Not required for UPNP forwardType. Required for STATIC forwardType. Ignored if accessType is DISABLED or forwardType is UPNP. - """ - port: Int -} - -input AllowedOriginInput { - """A list of origins allowed to interact with the API""" - origins: [String!]! -} - -input EnableDynamicRemoteAccessInput { - """The URL for dynamic remote access""" - url: URL! - - """Whether to enable or disable dynamic remote access""" - enabled: Boolean! -} - -type Subscription { - displaySubscription: Display! - infoSubscription: Info! - logFile(path: String!): LogFileContent! - notificationAdded: Notification! - notificationsOverview: NotificationOverview! - ownerSubscription: Owner! 
- registrationSubscription: Registration! - serversSubscription: Server! - parityHistorySubscription: ParityCheck! - arraySubscription: UnraidArray! -} \ No newline at end of file diff --git a/api/generated-schema.graphql b/api/generated-schema.graphql index bcd89baaa..b71ad6150 100644 --- a/api/generated-schema.graphql +++ b/api/generated-schema.graphql @@ -916,12 +916,91 @@ type BackupMutations { input CreateBackupJobConfigInput { name: String! + backupMode: BackupMode! = PREPROCESSING sourcePath: String! remoteName: String! destinationPath: String! schedule: String! enabled: Boolean! = true rcloneOptions: JSON + + """Preprocessing configuration for this backup job""" + preprocessConfig: PreprocessConfigInput +} + +""" +The mode of backup to perform (Raw file backup or Preprocessing-based). +""" +enum BackupMode { + RAW + PREPROCESSING +} + +input PreprocessConfigInput { + """Type of preprocessing to perform""" + type: PreprocessType! + zfsConfig: ZfsPreprocessConfigInput + flashConfig: FlashPreprocessConfigInput + scriptConfig: ScriptPreprocessConfigInput + + """Timeout for preprocessing in seconds""" + timeout: Float! = 3600 + + """Whether to cleanup on failure""" + cleanupOnFailure: Boolean! = true +} + +"""Type of preprocessing to perform before backup""" +enum PreprocessType { + NONE + ZFS + FLASH + SCRIPT +} + +input ZfsPreprocessConfigInput { + """ZFS pool name""" + poolName: String! + + """Dataset name within the pool""" + datasetName: String! + + """Snapshot name prefix""" + snapshotPrefix: String + + """Whether to cleanup snapshots after backup""" + cleanupSnapshots: Boolean! = true + + """Number of snapshots to retain""" + retainSnapshots: Float +} + +input FlashPreprocessConfigInput { + """Flash drive mount path""" + flashPath: String! = "/boot" + + """Whether to include git history""" + includeGitHistory: Boolean! = true + + """Additional paths to include in backup""" + additionalPaths: [String!] 
+} + +input ScriptPreprocessConfigInput { + """Path to the script file""" + scriptPath: String! + + """Arguments to pass to the script""" + scriptArgs: [String!] + + """Working directory for script execution""" + workingDirectory: String + + """Environment variables for script execution""" + environment: JSON + + """Output file path where script should write data""" + outputPath: String! } input UpdateBackupJobConfigInput { @@ -932,7 +1011,12 @@ input UpdateBackupJobConfigInput { schedule: String enabled: Boolean rcloneOptions: JSON + + """Preprocessing configuration for this backup job""" + preprocessConfig: PreprocessConfigInput lastRunStatus: String + currentJobId: String + lastRunAt: String } input InitiateBackupInput { @@ -1067,6 +1151,37 @@ A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date """ scalar DateTime +type ZfsPreprocessConfig { + poolName: String! + datasetName: String! + snapshotPrefix: String + cleanupSnapshots: Boolean! + retainSnapshots: Float +} + +type FlashPreprocessConfig { + flashPath: String! + includeGitHistory: Boolean! + additionalPaths: [String!] +} + +type ScriptPreprocessConfig { + scriptPath: String! + scriptArgs: [String!] + workingDirectory: String + environment: JSON + outputPath: String! +} + +type PreprocessConfig { + type: PreprocessType! + zfsConfig: ZfsPreprocessConfig + flashConfig: FlashPreprocessConfig + scriptConfig: ScriptPreprocessConfig + timeout: Float! + cleanupOnFailure: Boolean! +} + type RCloneDrive { """Provider name""" name: String! 
@@ -1211,7 +1326,7 @@ type RCloneJob { configId: PrefixedID """Current status of the job""" - status: RCloneJobStatus + status: BackupJobStatus """Whether the job is finished""" finished: Boolean @@ -1232,11 +1347,11 @@ type RCloneJob { hasRecentJob: Boolean } -"""Status of an RClone job""" -enum RCloneJobStatus { +"""Status of a backup job""" +enum BackupJobStatus { RUNNING COMPLETED - ERROR + FAILED CANCELLED } @@ -1262,6 +1377,7 @@ type BackupJobConfig implements Node { """Human-readable name for this backup job""" name: String! + backupMode: BackupMode! """Source path to backup""" sourcePath: String! @@ -1281,6 +1397,9 @@ type BackupJobConfig implements Node { """RClone options (e.g., --transfers, --checkers)""" rcloneOptions: JSON + """Preprocessing configuration for this backup job""" + preprocessConfig: PreprocessConfig + """When this config was created""" createdAt: DateTime! diff --git a/api/src/__test__/graphql/resolvers/rclone-api.service.test.ts b/api/src/__test__/graphql/resolvers/rclone-api.service.test.ts index ade9af455..f6716de78 100644 --- a/api/src/__test__/graphql/resolvers/rclone-api.service.test.ts +++ b/api/src/__test__/graphql/resolvers/rclone-api.service.test.ts @@ -87,6 +87,27 @@ describe('RCloneApiService', () => { formatDuration: vi.fn(), } as any; + // Mock RCloneStatusService + const mockStatusService = { + enhanceStatsWithFormattedFields: vi.fn(), + transformStatsToJob: vi.fn(), + calculateCombinedStats: vi.fn(), + parseActiveJobs: vi.fn(), + parseBackupStatus: vi.fn(), + parseJobWithStats: vi.fn(), + parseAllJobsWithStats: vi.fn(), + parseJobsWithStats: vi.fn(), + getBackupStatus: vi.fn(), + } as any; + + // Mock StreamingJobManager + const mockStreamingJobManager = { + startJob: vi.fn(), + stopJob: vi.fn(), + getJobStatus: vi.fn(), + getAllJobs: vi.fn(), + } as any; + // Mock cache manager mockCacheManager = { get: vi.fn().mockResolvedValue(null), @@ -94,7 +115,7 @@ describe('RCloneApiService', () => { del: 
vi.fn().mockResolvedValue(undefined), }; - service = new RCloneApiService(mockFormatService, mockCacheManager); + service = new RCloneApiService(mockStatusService, mockStreamingJobManager); await service.onModuleInit(); }); diff --git a/api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts b/api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts index 6218b839e..89edd2907 100644 --- a/api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts +++ b/api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts @@ -7,12 +7,19 @@ import { join } from 'path'; import { CronJob } from 'cron'; import { v4 as uuidv4 } from 'uuid'; +import type { + PreprocessConfigInput, + PreprocessResult, +} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; import { getters } from '@app/store/index.js'; import { BackupJobConfig, + BackupMode, CreateBackupJobConfigInput, UpdateBackupJobConfigInput, } from '@app/unraid-api/graph/resolvers/backup/backup.model.js'; +import { PreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.service.js'; +import { PreprocessType } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js'; const JOB_GROUP_PREFIX = 'backup-'; @@ -25,7 +32,9 @@ interface BackupJobConfigData { destinationPath: string; schedule: string; enabled: boolean; + backupMode?: BackupMode; rcloneOptions?: Record; + preprocessConfig?: PreprocessConfigInput; createdAt: string; updatedAt: string; lastRunAt?: string; @@ -41,7 +50,8 @@ export class BackupConfigService implements OnModuleInit { constructor( private readonly rcloneService: RCloneService, - private readonly schedulerRegistry: SchedulerRegistry + private readonly schedulerRegistry: SchedulerRegistry, + private readonly preprocessingService: PreprocessingService ) { const paths = getters.paths(); 
this.configPath = join(paths.backupBase, 'backup-jobs.json'); @@ -76,17 +86,35 @@ export class BackupConfigService implements OnModuleInit { id: string, input: UpdateBackupJobConfigInput ): Promise { + this.logger.debug( + `[updateBackupJobConfig] Called with ID: ${id}, Input: ${JSON.stringify(input)}` + ); const existing = this.configs.get(id); - if (!existing) return null; + if (!existing) { + this.logger.warn(`[updateBackupJobConfig] No existing config found for ID: ${id}`); + return null; + } + this.logger.debug( + `[updateBackupJobConfig] Existing config for ID ${id}: ${JSON.stringify(existing)}` + ); const updated: BackupJobConfigData = { ...existing, ...input, updatedAt: new Date().toISOString(), }; + this.logger.debug( + `[updateBackupJobConfig] Updated object for ID ${id} (before set): ${JSON.stringify(updated)}` + ); this.configs.set(id, updated); + const immediatelyAfterSet = this.configs.get(id); + this.logger.debug( + `[updateBackupJobConfig] Config for ID ${id} (immediately after set): ${JSON.stringify(immediatelyAfterSet)}` + ); + await this.saveConfigs(); + this.logger.debug(`[updateBackupJobConfig] Configs saved for ID: ${id}`); this.unscheduleJob(id); if (updated.enabled) { @@ -107,7 +135,15 @@ export class BackupConfigService implements OnModuleInit { } async getBackupJobConfig(id: string): Promise { + this.logger.debug(`[getBackupJobConfig] Called for ID: ${id}`); const config = this.configs.get(id); + if (config) { + this.logger.debug( + `[getBackupJobConfig] Found config for ID ${id}: ${JSON.stringify(config)}` + ); + } else { + this.logger.warn(`[getBackupJobConfig] No config found for ID: ${id}`); + } return config ? 
this.mapToGraphQL(config) : null; } @@ -119,13 +155,66 @@ export class BackupConfigService implements OnModuleInit { this.logger.log(`Executing backup job: ${config.name}`); try { - const result = (await this.rcloneService['rcloneApiService'].startBackup({ - srcPath: config.sourcePath, - dstPath: `${config.remoteName}:${config.destinationPath}`, - async: true, - configId: config.id, - options: config.rcloneOptions || {}, - })) as { jobId?: string; jobid?: string }; + let sourcePath = config.sourcePath; + let preprocessResult: PreprocessResult | null = null; + + if (config.preprocessConfig && config.preprocessConfig.type !== PreprocessType.NONE) { + this.logger.log(`Running preprocessing for job: ${config.name}`); + + preprocessResult = await this.preprocessingService.executePreprocessing( + config.preprocessConfig, + { + jobId: config.id, + onProgress: (progress) => { + this.logger.debug(`Preprocessing progress for ${config.name}: ${progress}%`); + }, + onOutput: (data) => { + this.logger.debug(`Preprocessing output for ${config.name}: ${data}`); + }, + onError: (error) => { + this.logger.error(`Preprocessing error for ${config.name}: ${error}`); + }, + } + ); + + if (!preprocessResult.success) { + throw new Error(`Preprocessing failed: ${preprocessResult.error}`); + } + + if (preprocessResult.streamPath) { + sourcePath = preprocessResult.streamPath; + this.logger.log(`Using streaming source for backup: ${sourcePath}`); + } else if (preprocessResult.outputPath) { + sourcePath = preprocessResult.outputPath; + this.logger.log(`Using preprocessed output for backup: ${sourcePath}`); + } + } + + const isStreamingBackup = + preprocessResult?.streamPath && + (config.preprocessConfig?.type === PreprocessType.ZFS || + config.preprocessConfig?.type === PreprocessType.FLASH); + + let result; + if (isStreamingBackup && preprocessResult?.streamPath) { + const streamingOptions = this.buildStreamingOptions( + config.preprocessConfig!.type, + preprocessResult.streamPath, + 
config.remoteName, + config.destinationPath + ); + + result = + await this.rcloneService['rcloneApiService'].startStreamingBackup(streamingOptions); + } else { + result = (await this.rcloneService['rcloneApiService'].startBackup({ + srcPath: sourcePath, + dstPath: `${config.remoteName}:${config.destinationPath}`, + async: true, + configId: config.id, + options: config.rcloneOptions || {}, + })) as { jobId?: string; jobid?: string }; + } const jobId = result.jobId || result.jobid; @@ -145,6 +234,47 @@ export class BackupConfigService implements OnModuleInit { await this.saveConfigs(); this.logger.error(`Backup job ${config.name} failed:`, error); + + if (config.preprocessConfig?.cleanupOnFailure) { + try { + await this.preprocessingService.cleanup(config.id); + } catch (cleanupError) { + this.logger.error( + `Failed to cleanup preprocessing for job ${config.name}:`, + cleanupError + ); + } + } + } + } + + private buildStreamingOptions( + preprocessType: PreprocessType, + streamPath: string, + remoteName: string, + destinationPath: string + ) { + switch (preprocessType) { + case PreprocessType.ZFS: + return { + remoteName, + remotePath: destinationPath, + sourceCommand: 'zfs', + sourceArgs: ['send', streamPath], + preprocessType, + timeout: 3600000, + }; + case PreprocessType.FLASH: + return { + remoteName, + remotePath: destinationPath, + sourceCommand: 'tar', + sourceArgs: ['cf', '-', streamPath], + preprocessType, + timeout: 3600000, + }; + default: + throw new Error(`Unsupported streaming preprocessing type: ${preprocessType}`); } } @@ -229,15 +359,23 @@ export class BackupConfigService implements OnModuleInit { } private mapToGraphQL(config: BackupJobConfigData): BackupJobConfig { + const preprocessConfig = config.preprocessConfig + ? 
{ + ...config.preprocessConfig, + } + : undefined; + return { id: config.id, name: config.name, + backupMode: config.backupMode || BackupMode.PREPROCESSING, sourcePath: config.sourcePath, remoteName: config.remoteName, destinationPath: config.destinationPath, schedule: config.schedule, enabled: config.enabled, rcloneOptions: config.rcloneOptions, + preprocessConfig: preprocessConfig, createdAt: new Date(config.createdAt), updatedAt: new Date(config.updatedAt), lastRunAt: config.lastRunAt ? new Date(config.lastRunAt) : undefined, diff --git a/api/src/unraid-api/graph/resolvers/backup/backup-mutations.resolver.ts b/api/src/unraid-api/graph/resolvers/backup/backup-mutations.resolver.ts index 0f6f726d9..01cf0f5b6 100644 --- a/api/src/unraid-api/graph/resolvers/backup/backup-mutations.resolver.ts +++ b/api/src/unraid-api/graph/resolvers/backup/backup-mutations.resolver.ts @@ -173,20 +173,13 @@ export class BackupMutationsResolver { config.id ); - // Store the job ID in the config if successful + // Store the job ID and update timestamps in the config if successful if (result.jobId) { await this.backupConfigService.updateBackupJobConfig(id, { lastRunStatus: `Started with job ID: ${result.jobId}`, + currentJobId: result.jobId, + lastRunAt: new Date().toISOString(), }); - - // Update the currentJobId in the config - const configData = this.backupConfigService['configs'].get(id); - if (configData) { - configData.currentJobId = result.jobId; - configData.lastRunAt = new Date().toISOString(); - this.backupConfigService['configs'].set(id, configData); - await this.backupConfigService['saveConfigs'](); - } } return result; diff --git a/api/src/unraid-api/graph/resolvers/backup/backup.model.ts b/api/src/unraid-api/graph/resolvers/backup/backup.model.ts index ae6c857eb..a60624616 100644 --- a/api/src/unraid-api/graph/resolvers/backup/backup.model.ts +++ b/api/src/unraid-api/graph/resolvers/backup/backup.model.ts @@ -1,14 +1,39 @@ -import { Field, InputType, ObjectType } from 
'@nestjs/graphql'; +import { Field, InputType, ObjectType, registerEnumType } from '@nestjs/graphql'; import { type Layout } from '@jsonforms/core'; -import { IsBoolean, IsNotEmpty, IsObject, IsOptional, IsString, Matches } from 'class-validator'; +import { Type } from 'class-transformer'; +import { + IsBoolean, + IsEnum, + IsNotEmpty, + IsObject, + IsOptional, + IsString, + Matches, + ValidateIf, + ValidateNested, +} from 'class-validator'; import { GraphQLJSON } from 'graphql-scalars'; +import { + PreprocessConfig, + PreprocessConfigInput, +} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; import { Node } from '@app/unraid-api/graph/resolvers/base.model.js'; import { RCloneJob } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js'; import { PrefixedID } from '@app/unraid-api/graph/scalars/graphql-type-prefixed-id.js'; import { DataSlice } from '@app/unraid-api/types/json-forms.js'; +export enum BackupMode { + RAW = 'RAW', + PREPROCESSING = 'PREPROCESSING', +} + +registerEnumType(BackupMode, { + name: 'BackupMode', + description: 'The mode of backup to perform (Raw file backup or Preprocessing-based).', +}); + @ObjectType({ implements: () => Node, }) @@ -73,6 +98,9 @@ export class BackupJobConfig extends Node { @Field(() => String, { description: 'Human-readable name for this backup job' }) name!: string; + @Field(() => BackupMode) + backupMode!: BackupMode; + @Field(() => String, { description: 'Source path to backup' }) sourcePath!: string; @@ -96,6 +124,12 @@ export class BackupJobConfig extends Node { }) rcloneOptions?: Record; + @Field(() => PreprocessConfig, { + description: 'Preprocessing configuration for this backup job', + nullable: true, + }) + preprocessConfig?: PreprocessConfig; + @Field(() => Date, { description: 'When this config was created' }) createdAt!: Date; @@ -122,9 +156,15 @@ export class CreateBackupJobConfigInput { @IsNotEmpty() name!: string; + @Field(() => BackupMode, { defaultValue: 
BackupMode.PREPROCESSING }) + @IsEnum(BackupMode) + @IsNotEmpty() + backupMode?: BackupMode; + @Field(() => String) @IsString() - @IsNotEmpty() + @ValidateIf((o) => o.backupMode === BackupMode.RAW) + @IsNotEmpty({ message: 'sourcePath should not be empty when backupMode is RAW' }) sourcePath!: string; @Field(() => String) @@ -156,6 +196,16 @@ export class CreateBackupJobConfigInput { @IsOptional() @IsObject() rcloneOptions?: Record; + + @Field(() => PreprocessConfigInput, { + description: 'Preprocessing configuration for this backup job', + nullable: true, + }) + @IsOptional() + @ValidateIf((o) => o.backupMode === BackupMode.PREPROCESSING) + @ValidateNested() + @Type(() => PreprocessConfigInput) + preprocessConfig?: PreprocessConfigInput; } @InputType() @@ -206,10 +256,29 @@ export class UpdateBackupJobConfigInput { @IsObject() rcloneOptions?: Record; + @Field(() => PreprocessConfigInput, { + description: 'Preprocessing configuration for this backup job', + nullable: true, + }) + @IsOptional() + @ValidateNested() + @Type(() => PreprocessConfigInput) + preprocessConfig?: PreprocessConfigInput; + @Field(() => String, { nullable: true }) @IsOptional() @IsString() lastRunStatus?: string; + + @Field(() => String, { nullable: true }) + @IsOptional() + @IsString() + currentJobId?: string; + + @Field(() => String, { nullable: true }) + @IsOptional() + @IsString() + lastRunAt?: string; } @ObjectType() diff --git a/api/src/unraid-api/graph/resolvers/backup/backup.module.ts b/api/src/unraid-api/graph/resolvers/backup/backup.module.ts index 95a8096e5..9e41e7aee 100644 --- a/api/src/unraid-api/graph/resolvers/backup/backup.module.ts +++ b/api/src/unraid-api/graph/resolvers/backup/backup.module.ts @@ -7,10 +7,11 @@ import { BackupJobConfigResolver, BackupResolver, } from '@app/unraid-api/graph/resolvers/backup/backup.resolver.js'; +import { PreprocessingModule } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.module.js'; import { RCloneModule } from 
'@app/unraid-api/graph/resolvers/rclone/rclone.module.js'; @Module({ - imports: [RCloneModule, ScheduleModule.forRoot()], + imports: [RCloneModule, ScheduleModule.forRoot(), PreprocessingModule], providers: [BackupResolver, BackupJobConfigResolver, BackupMutationsResolver, BackupConfigService], exports: [], }) diff --git a/api/src/unraid-api/graph/resolvers/backup/backup.resolver.ts b/api/src/unraid-api/graph/resolvers/backup/backup.resolver.ts index 59b27b49b..b7cb3bc16 100644 --- a/api/src/unraid-api/graph/resolvers/backup/backup.resolver.ts +++ b/api/src/unraid-api/graph/resolvers/backup/backup.resolver.ts @@ -10,14 +10,16 @@ import { BackupJobConfigFormInput, BackupStatus, } from '@app/unraid-api/graph/resolvers/backup/backup.model.js'; +import { + BACKUP_JOB_GROUP_PREFIX, + getBackupJobGroupId, +} from '@app/unraid-api/graph/resolvers/backup/backup.utils.js'; import { buildBackupJobConfigSchema } from '@app/unraid-api/graph/resolvers/backup/jsonforms/backup-jsonforms-config.js'; -import { RCloneJob, RCloneJobStatus } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js'; +import { BackupJobStatus, RCloneJob } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js'; import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js'; import { PrefixedID } from '@app/unraid-api/graph/scalars/graphql-type-prefixed-id.js'; import { FormatService } from '@app/unraid-api/utils/format.service.js'; -const JOB_GROUP_PREFIX = 'backup-'; - @Resolver(() => Backup) export class BackupResolver { private readonly logger = new Logger(BackupResolver.name); @@ -87,12 +89,10 @@ export class BackupResolver { async backupJobConfigForm( @Args('input', { nullable: true }) input?: BackupJobConfigFormInput ): Promise { - const remoteNames = await this.rcloneService.getConfiguredRemotes(); - const showAdvanced = input?.showAdvanced ?? 
false; + const remotes = await this.rcloneService.getRemoteDetails(); const { dataSchema, uiSchema } = buildBackupJobConfigSchema({ - remoteNames, - showAdvanced, + remotes, }); return { @@ -115,7 +115,7 @@ export class BackupResolver { this.logger.debug('backupJobs called - returning all jobs for frontend filtering'); const jobs = (await this.rcloneService['rcloneApiService'].getAllJobsWithStats()).filter( - (job) => job.group?.startsWith(JOB_GROUP_PREFIX) + (job) => job.group?.startsWith(BACKUP_JOB_GROUP_PREFIX) ); this.logger.debug(`Returning ${jobs.length} jobs total for frontend filtering`); @@ -140,9 +140,21 @@ export class BackupJobConfigResolver { }) async currentJob(@Parent() config: BackupJobConfig): Promise { if (!config.currentJobId) { + // If there's no currentJobId, we assume no job is running for this config. + // Or, if currentJobId exists but is an empty string, also assume no job. return null; } + // Construct the group ID using the new utility function. + // const groupId = getBackupJobGroupId(config.id); // Old problematic line + + this.logger.debug( + `Looking for current job for config ${config.id} using currentJobId: ${config.currentJobId}` + ); + + // Pass the specific rclone job ID (config.currentJobId) as the primary identifier. + // The second argument `config.id` is used by getEnhancedJobStatus to populate RCloneJob.configId + // and assist in constructing the full RCloneJob.id. return this.rcloneService.getEnhancedJobStatus(config.currentJobId, config.id); } } diff --git a/api/src/unraid-api/graph/resolvers/backup/backup.utils.ts b/api/src/unraid-api/graph/resolvers/backup/backup.utils.ts new file mode 100644 index 000000000..b0f0dbc4f --- /dev/null +++ b/api/src/unraid-api/graph/resolvers/backup/backup.utils.ts @@ -0,0 +1,32 @@ +export const BACKUP_JOB_GROUP_PREFIX = 'backup-'; + +/** + * Generates the group ID for a backup job based on its configuration ID. + * This group ID is used by RClone to group related backup operations. 
+ * @param configId The ID of the backup job configuration. + * @returns The RClone group ID string. + */ +export function getBackupJobGroupId(configId: string): string { + return `${BACKUP_JOB_GROUP_PREFIX}${configId}`; +} + +/** + * Extracts the configuration ID from a backup job group ID. + * @param groupId The RClone group ID string (e.g., "backup-someConfigId"). + * @returns The configuration ID if the group ID is valid and prefixed, otherwise undefined. + */ +export function getConfigIdFromGroupId(groupId: string): string | undefined { + if (groupId.startsWith(BACKUP_JOB_GROUP_PREFIX)) { + return groupId.substring(BACKUP_JOB_GROUP_PREFIX.length); + } + return undefined; +} + +/** + * Checks if the given ID corresponds to a backup job group. + * @param id The ID string to check (can be a job ID or a group ID). + * @returns True if the ID represents a backup job group, false otherwise. + */ +export function isBackupJobGroup(id: string): boolean { + return id.startsWith(BACKUP_JOB_GROUP_PREFIX); +} diff --git a/api/src/unraid-api/graph/resolvers/backup/jsonforms/backup-jsonforms-config.ts b/api/src/unraid-api/graph/resolvers/backup/jsonforms/backup-jsonforms-config.ts index 4e732bc0c..b1eab79b9 100644 --- a/api/src/unraid-api/graph/resolvers/backup/jsonforms/backup-jsonforms-config.ts +++ b/api/src/unraid-api/graph/resolvers/backup/jsonforms/backup-jsonforms-config.ts @@ -1,11 +1,13 @@ -import type { LabelElement, Layout } from '@jsonforms/core'; -import { JsonSchema7 } from '@jsonforms/core'; +import type { LabelElement, Layout, SchemaBasedCondition } from '@jsonforms/core'; +import { JsonSchema7, RuleEffect } from '@jsonforms/core'; +import type { RCloneRemote } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js'; import type { DataSlice, SettingSlice, UIElement } from '@app/unraid-api/types/json-forms.js'; +import { BackupMode } from '@app/unraid-api/graph/resolvers/backup/backup.model.js'; import { createLabeledControl } from 
'@app/unraid-api/graph/utils/form-utils.js'; import { mergeSettingSlices } from '@app/unraid-api/types/json-forms.js'; -function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[] }): SettingSlice { +function getBasicBackupConfigSlice({ remotes = [] }: { remotes?: RCloneRemote[] }): SettingSlice { const basicConfigElements: UIElement[] = [ createLabeledControl({ scope: '#/properties/name', @@ -18,12 +20,23 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[ }), createLabeledControl({ - scope: '#/properties/sourcePath', - label: 'Source Path', - description: 'The local path to backup (e.g., /mnt/user/Documents)', + scope: '#/properties/backupMode', + label: 'Backup Mode', + description: 'Choose between preprocessing-based backup or raw file backup', controlOptions: { - placeholder: '/mnt/user/', - format: 'string', + suggestions: [ + { + value: BackupMode.PREPROCESSING, + label: 'Preprocessing Backup', + tooltip: + 'Advanced backup using ZFS snapshots, flash drive backup, or custom scripts to prepare data before transfer', + }, + { + value: BackupMode.RAW, + label: 'Raw File Backup', + tooltip: 'Simple folder-to-folder backup with direct file/directory paths', + }, + ], }, }), @@ -32,9 +45,9 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[ label: 'Remote Configuration', description: 'Select the RClone remote configuration to use for this backup', controlOptions: { - suggestions: remoteNames.map((name) => ({ - value: name, - label: name, + suggestions: remotes.map((remote) => ({ + value: remote.name, + label: `${remote.name} (${remote.type})`, })), }, }), @@ -68,6 +81,16 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[ label: 'Weekly (Sunday 2:00 AM)', tooltip: 'Runs every Sunday at 2:00 AM', }, + { + value: '0 9 * * 1', + label: 'Mondays at 9:00 AM', + tooltip: 'Runs every Monday at 9:00 AM', + }, + { + value: '0 0 1 * *', + label: 'Monthly 
(1st day at midnight)', + tooltip: 'Runs on the 1st day of every month at midnight', + }, { value: '0 2 1 * *', label: 'Monthly (1st at 2:00 AM)', @@ -90,23 +113,6 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[ toggle: true, }, }), - - { - type: 'Label', - text: 'Advanced Options', - options: { - description: 'Optional RClone-specific settings for this backup job.', - }, - } as LabelElement, - - createLabeledControl({ - scope: '#/properties/showAdvanced', - label: 'Show Advanced RClone Options', - description: 'Display additional RClone configuration options', - controlOptions: { - toggle: true, - }, - }), ]; const basicConfigProperties: Record = { @@ -117,17 +123,18 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[ minLength: 1, maxLength: 100, }, - sourcePath: { + backupMode: { type: 'string', - title: 'Source Path', - description: 'Source path to backup', - minLength: 1, + title: 'Backup Mode', + description: 'Type of backup to perform', + enum: [BackupMode.PREPROCESSING, BackupMode.RAW], + default: BackupMode.PREPROCESSING, }, remoteName: { type: 'string', title: 'Remote Name', description: 'Remote name from rclone config', - enum: remoteNames.length > 0 ? remoteNames : ['No remotes configured'], + enum: remotes.length > 0 ? 
remotes.map((remote) => remote.name) : ['No remotes configured'], }, destinationPath: { type: 'string', @@ -149,11 +156,13 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[ description: 'Whether this backup job is enabled', default: true, }, - showAdvanced: { - type: 'boolean', - title: 'Show Advanced Options', - description: 'Whether to show advanced RClone options', - default: false, + configStep: { + type: 'object', + properties: { + current: { type: 'integer', default: 0 }, + total: { type: 'integer', default: 3 }, + }, + default: { current: 0, total: 3 }, }, }; @@ -169,14 +178,103 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[ }; } -function getAdvancedBackupConfigSlice({ showAdvanced }: { showAdvanced: boolean }): SettingSlice { - if (!showAdvanced) { - return { - properties: {}, - elements: [], - }; - } +function getRawBackupConfigSlice(): SettingSlice { + const rawConfigElements: UIElement[] = [ + { + type: 'Label', + text: 'Raw Backup Configuration', + options: { + description: 'Configure direct file/folder backup with manual source paths.', + }, + } as LabelElement, + createLabeledControl({ + scope: '#/properties/rawConfig/properties/sourcePath', + label: 'Source Path', + description: 'The local path to backup (e.g., /mnt/user/Documents)', + controlOptions: { + placeholder: '/mnt/user/', + format: 'string', + }, + }), + + createLabeledControl({ + scope: '#/properties/rawConfig/properties/excludePatterns', + label: 'Exclude Patterns', + description: 'File patterns to exclude from backup (one per line, supports wildcards)', + controlOptions: { + multi: true, + placeholder: '*.tmp', + format: 'string', + }, + }), + + createLabeledControl({ + scope: '#/properties/rawConfig/properties/includePatterns', + label: 'Include Patterns', + description: 'File patterns to specifically include (one per line, supports wildcards)', + controlOptions: { + multi: true, + placeholder: '*.pdf', + format: 
'string', + }, + }), + ]; + + const rawConfigProperties: Record = { + rawConfig: { + type: 'object', + title: 'Raw Backup Configuration', + description: 'Configuration for direct file backup', + properties: { + sourcePath: { + type: 'string', + title: 'Source Path', + description: 'Source path to backup', + minLength: 1, + }, + excludePatterns: { + type: 'array', + title: 'Exclude Patterns', + description: 'Patterns to exclude from backup', + items: { + type: 'string', + }, + default: [], + }, + includePatterns: { + type: 'array', + title: 'Include Patterns', + description: 'Patterns to include in backup', + items: { + type: 'string', + }, + default: [], + }, + }, + required: ['sourcePath'], + }, + }; + + const conditionalLayoutElement: UIElement = { + type: 'VerticalLayout', + rule: { + effect: RuleEffect.SHOW, + condition: { + scope: '#/properties/backupMode', + schema: { const: BackupMode.RAW }, + } as SchemaBasedCondition, + }, + elements: rawConfigElements, + }; + + return { + properties: rawConfigProperties, + elements: [conditionalLayoutElement], + }; +} + +function getAdvancedBackupConfigSlice(): SettingSlice { const advancedConfigElements: UIElement[] = [ createLabeledControl({ scope: '#/properties/rcloneOptions/properties/transfers', @@ -275,7 +373,7 @@ function getAdvancedBackupConfigSlice({ showAdvanced }: { showAdvanced: boolean const verticalLayoutElement: UIElement = { type: 'VerticalLayout', elements: advancedConfigElements, - options: { step: 1, showDividers: true }, + options: { step: 2, showDividers: true }, }; return { @@ -284,27 +382,411 @@ function getAdvancedBackupConfigSlice({ showAdvanced }: { showAdvanced: boolean }; } -export function buildBackupJobConfigSchema({ - remoteNames = [], - showAdvanced = false, -}: { - remoteNames?: string[]; - showAdvanced?: boolean; -}): { +function getPreprocessingConfigSlice(): SettingSlice { + const preprocessingElements: UIElement[] = [ + { + type: 'Label', + text: 'Preprocessing Configuration', + 
options: { + description: + 'Configure preprocessing steps to run before backup (e.g., ZFS snapshots, Flash backup, custom scripts).', + }, + } as LabelElement, + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/type', + label: 'Preprocessing Type', + description: 'Select the type of preprocessing to perform before backup', + controlOptions: { + suggestions: [ + { + value: 'ZFS', + label: 'ZFS Snapshot', + tooltip: 'Create ZFS snapshot and stream it', + }, + { + value: 'FLASH', + label: 'Flash Backup', + tooltip: 'Backup Unraid flash drive with git history', + }, + { + value: 'SCRIPT', + label: 'Custom Script', + tooltip: 'Run custom script before backup', + }, + ], + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/timeout', + label: 'Timeout (seconds)', + description: 'Maximum time to wait for preprocessing to complete (default: 300 seconds)', + controlOptions: { + placeholder: '300', + format: 'number', + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/cleanupOnFailure', + label: 'Cleanup on Failure', + description: 'Whether to clean up preprocessing artifacts if the backup fails', + controlOptions: { + toggle: true, + }, + }), + + // ZFS Configuration + { + type: 'VerticalLayout', + rule: { + effect: RuleEffect.SHOW, + condition: { + scope: '#/properties/preprocessConfig/properties/type', + schema: { const: 'ZFS' }, + } as SchemaBasedCondition, + }, + elements: [ + { + type: 'Label', + text: 'ZFS Configuration', + options: { + description: 'Configure ZFS snapshot settings for preprocessing.', + }, + } as LabelElement, + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/zfsConfig/properties/poolName', + label: 'ZFS Pool Name', + description: 'Name of the ZFS pool containing the dataset', + controlOptions: { + placeholder: 'tank', + format: 'string', + }, + }), + + createLabeledControl({ + scope: 
'#/properties/preprocessConfig/properties/zfsConfig/properties/datasetName', + label: 'Dataset Name', + description: 'Name of the ZFS dataset to snapshot', + controlOptions: { + placeholder: 'data/documents', + format: 'string', + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/zfsConfig/properties/snapshotPrefix', + label: 'Snapshot Prefix', + description: 'Prefix for snapshot names (default: backup)', + controlOptions: { + placeholder: 'backup', + format: 'string', + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/zfsConfig/properties/cleanupSnapshots', + label: 'Cleanup Snapshots', + description: 'Whether to clean up snapshots after backup', + controlOptions: { + toggle: true, + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/zfsConfig/properties/retainSnapshots', + label: 'Retain Snapshots', + description: 'Number of snapshots to retain (0 = keep all)', + controlOptions: { + placeholder: '5', + format: 'number', + }, + }), + ], + }, + + // Flash Configuration + { + type: 'VerticalLayout', + rule: { + effect: RuleEffect.SHOW, + condition: { + scope: '#/properties/preprocessConfig/properties/type', + schema: { const: 'FLASH' }, + } as SchemaBasedCondition, + }, + elements: [ + { + type: 'Label', + text: 'Flash Backup Configuration', + options: { + description: 'Configure Unraid flash drive backup settings.', + }, + } as LabelElement, + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/flashConfig/properties/flashPath', + label: 'Flash Path', + description: 'Path to the Unraid flash drive (default: /boot)', + controlOptions: { + placeholder: '/boot', + format: 'string', + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/flashConfig/properties/includeGitHistory', + label: 'Include Git History', + description: 'Whether to include git history in the backup', + controlOptions: { + toggle: 
true, + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/flashConfig/properties/additionalPaths', + label: 'Additional Paths', + description: 'Additional paths to include in flash backup (one per line)', + controlOptions: { + multi: true, + placeholder: '/boot/config/plugins', + format: 'string', + }, + }), + ], + }, + + // Script Configuration + { + type: 'VerticalLayout', + rule: { + effect: RuleEffect.SHOW, + condition: { + scope: '#/properties/preprocessConfig/properties/type', + schema: { const: 'SCRIPT' }, + } as SchemaBasedCondition, + }, + elements: [ + { + type: 'Label', + text: 'Custom Script Configuration', + options: { + description: 'Configure custom script execution settings.', + }, + } as LabelElement, + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/scriptConfig/properties/scriptPath', + label: 'Script Path', + description: 'Full path to the script to execute', + controlOptions: { + placeholder: '/mnt/user/scripts/backup-prep.sh', + format: 'string', + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/scriptConfig/properties/scriptArgs', + label: 'Script Arguments', + description: 'Arguments to pass to the script (one per line)', + controlOptions: { + multi: true, + placeholder: '--verbose', + format: 'string', + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/scriptConfig/properties/workingDirectory', + label: 'Working Directory', + description: 'Working directory for script execution', + controlOptions: { + placeholder: '/tmp', + format: 'string', + }, + }), + + createLabeledControl({ + scope: '#/properties/preprocessConfig/properties/scriptConfig/properties/outputPath', + label: 'Output Path', + description: 'Path where script should write output files for backup', + controlOptions: { + placeholder: '/tmp/backup-output', + format: 'string', + }, + }), + ], + }, + ]; + + const preprocessingProperties: 
Record = { + preprocessConfig: { + type: 'object', + title: 'Preprocessing Configuration', + description: 'Configuration for preprocessing steps before backup', + properties: { + type: { + type: 'string', + title: 'Preprocessing Type', + description: 'Type of preprocessing to perform', + enum: ['ZFS', 'FLASH', 'SCRIPT'], + }, + timeout: { + type: 'integer', + title: 'Timeout', + description: 'Timeout in seconds for preprocessing', + minimum: 30, + maximum: 3600, + default: 300, + }, + cleanupOnFailure: { + type: 'boolean', + title: 'Cleanup on Failure', + description: 'Clean up preprocessing artifacts on failure', + default: true, + }, + zfsConfig: { + type: 'object', + title: 'ZFS Configuration', + properties: { + poolName: { + type: 'string', + title: 'Pool Name', + description: 'ZFS pool name', + minLength: 1, + }, + datasetName: { + type: 'string', + title: 'Dataset Name', + description: 'ZFS dataset name', + minLength: 1, + }, + snapshotPrefix: { + type: 'string', + title: 'Snapshot Prefix', + description: 'Prefix for snapshot names', + default: 'backup', + }, + cleanupSnapshots: { + type: 'boolean', + title: 'Cleanup Snapshots', + description: 'Clean up snapshots after backup', + default: true, + }, + retainSnapshots: { + type: 'integer', + title: 'Retain Snapshots', + description: 'Number of snapshots to retain', + minimum: 0, + default: 5, + }, + }, + required: ['poolName', 'datasetName'], + }, + flashConfig: { + type: 'object', + title: 'Flash Configuration', + properties: { + flashPath: { + type: 'string', + title: 'Flash Path', + description: 'Path to flash drive', + default: '/boot', + }, + includeGitHistory: { + type: 'boolean', + title: 'Include Git History', + description: 'Include git history in backup', + default: true, + }, + additionalPaths: { + type: 'array', + title: 'Additional Paths', + description: 'Additional paths to include', + items: { + type: 'string', + }, + default: [], + }, + }, + }, + scriptConfig: { + type: 'object', + title: 
'Script Configuration', + properties: { + scriptPath: { + type: 'string', + title: 'Script Path', + description: 'Path to script file', + minLength: 1, + }, + scriptArgs: { + type: 'array', + title: 'Script Arguments', + description: 'Arguments for script', + items: { + type: 'string', + }, + default: [], + }, + workingDirectory: { + type: 'string', + title: 'Working Directory', + description: 'Working directory for script', + default: '/tmp', + }, + outputPath: { + type: 'string', + title: 'Output Path', + description: 'Path for script output', + minLength: 1, + }, + }, + required: ['scriptPath', 'outputPath'], + }, + }, + required: ['type'], + }, + }; + + const conditionalLayoutElement: UIElement = { + type: 'VerticalLayout', + rule: { + effect: RuleEffect.SHOW, + condition: { + scope: '#/properties/backupMode', + schema: { const: BackupMode.PREPROCESSING }, + } as SchemaBasedCondition, + }, + elements: preprocessingElements, + }; + + return { + properties: preprocessingProperties, + elements: [conditionalLayoutElement], + }; +} + +export function buildBackupJobConfigSchema({ remotes = [] }: { remotes?: RCloneRemote[] }): { dataSchema: { properties: DataSlice; type: 'object' }; uiSchema: Layout; } { const slicesToMerge: SettingSlice[] = []; - const basicSlice = getBasicBackupConfigSlice({ remoteNames }); + const basicSlice = getBasicBackupConfigSlice({ remotes }); slicesToMerge.push(basicSlice); - const advancedSlice = getAdvancedBackupConfigSlice({ showAdvanced }); - if ( - showAdvanced && - (advancedSlice.elements.length > 0 || Object.keys(advancedSlice.properties).length > 0) - ) { - slicesToMerge.push(advancedSlice); + const preprocessingSlice = getPreprocessingConfigSlice(); + slicesToMerge.push(preprocessingSlice); + + const rawBackupSlice = getRawBackupConfigSlice(); + slicesToMerge.push(rawBackupSlice); + + const advancedSlice = getAdvancedBackupConfigSlice(); + if (Object.keys(advancedSlice.properties).length > 0) { + slicesToMerge.push({ properties: 
advancedSlice.properties, elements: [] }); } const mergedSlices = mergeSettingSlices(slicesToMerge); @@ -314,18 +796,33 @@ export function buildBackupJobConfigSchema({ properties: mergedSlices.properties, }; - const steps = [{ label: 'Backup Configuration', description: 'Basic backup job settings' }]; + const steps = [ + { label: 'Backup Configuration', description: 'Basic backup job settings and mode selection' }, + { + label: 'Source Configuration', + description: 'Configure backup source (preprocessing or raw files)', + }, + { label: 'Advanced Options', description: 'RClone-specific settings' }, + ]; - if (showAdvanced) { - steps.push({ label: 'Advanced Options', description: 'RClone-specific settings' }); - } + const step0Elements = basicSlice.elements; + + const step1WrapperLayout: UIElement = { + type: 'VerticalLayout', + elements: [...(preprocessingSlice.elements || []), ...(rawBackupSlice.elements || [])], + options: { step: 1 }, + }; + + const step2Elements = advancedSlice.elements; const steppedLayoutElement: UIElement = { type: 'SteppedLayout', options: { steps: steps, }, - elements: mergedSlices.elements, + elements: [...(step0Elements || []), step1WrapperLayout, ...(step2Elements || [])].filter( + (el) => el + ) as UIElement[], }; const titleLabel: UIElement = { diff --git a/api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-preprocessing.service.ts b/api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-preprocessing.service.ts new file mode 100644 index 000000000..cbb3dea36 --- /dev/null +++ b/api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-preprocessing.service.ts @@ -0,0 +1,261 @@ +import { Injectable, Logger } from '@nestjs/common'; +import { access, mkdir, writeFile } from 'fs/promises'; +import { dirname, join } from 'path'; + +import { execa } from 'execa'; + +import { FlashValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/flash-validation.service.js'; +import { + 
FlashPreprocessConfigInput, + PreprocessResult, + PreprocessType, +} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; +import { + RCloneApiService, + StreamingBackupOptions, +} from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js'; + +@Injectable() +export class FlashPreprocessingService { + private readonly logger = new Logger(FlashPreprocessingService.name); + + constructor( + private readonly rcloneApiService: RCloneApiService, + private readonly flashValidationService: FlashValidationService + ) {} + + async executeFlashPreprocessing( + config: FlashPreprocessConfigInput, + remoteName: string, + remotePath: string, + timeout: number = 3600000 + ): Promise { + // Validate configuration first + const validationResult = await this.flashValidationService.validateFlashConfig(config); + if (!validationResult.isValid) { + return { + success: false, + error: `Flash configuration validation failed: ${validationResult.errors.join(', ')}`, + metadata: { + validationErrors: validationResult.errors, + validationWarnings: validationResult.warnings, + }, + }; + } + + // Log any warnings + if (validationResult.warnings.length > 0) { + this.logger.warn(`Flash preprocessing warnings: ${validationResult.warnings.join(', ')}`); + } + + const tempGitPath = join(config.flashPath, '.git-backup-temp'); + let gitRepoInitialized = false; + + try { + // Initialize git repository if needed and includeGitHistory is enabled + if (config.includeGitHistory) { + gitRepoInitialized = await this.initializeGitRepository(config.flashPath, tempGitPath); + if (gitRepoInitialized) { + this.logger.log(`Initialized git repository for Flash backup at: ${tempGitPath}`); + } + } + + // Stream the Flash backup directly to rclone + const streamingResult = await this.streamFlashBackup( + config, + remoteName, + remotePath, + tempGitPath, + gitRepoInitialized, + timeout + ); + + // Cleanup temporary git repository + if (gitRepoInitialized) { + await 
this.cleanupTempGitRepo(tempGitPath); + this.logger.log(`Cleaned up temporary git repository: ${tempGitPath}`); + } + + return { + success: true, + outputPath: `${remoteName}:${remotePath}`, + metadata: { + flashPath: config.flashPath, + gitHistoryIncluded: config.includeGitHistory && gitRepoInitialized, + additionalPaths: config.additionalPaths, + bytesTransferred: streamingResult.bytesTransferred, + duration: streamingResult.duration, + validationWarnings: validationResult.warnings, + flashInfo: validationResult.metadata, + jobId: streamingResult.jobId, + }, + }; + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : String(error); + this.logger.error( + `Flash preprocessing failed: ${errorMessage}`, + error instanceof Error ? error.stack : undefined + ); + + // Cleanup temporary git repository on failure + if (gitRepoInitialized) { + try { + await this.cleanupTempGitRepo(tempGitPath); + this.logger.log(`Cleaned up temporary git repository after failure: ${tempGitPath}`); + } catch (cleanupError: unknown) { + const cleanupErrorMessage = + cleanupError instanceof Error ? 
cleanupError.message : String(cleanupError); + this.logger.error( + `Failed to cleanup temporary git repository: ${cleanupErrorMessage}` + ); + } + } + + return { + success: false, + error: errorMessage, + cleanupRequired: gitRepoInitialized, + metadata: { + flashPath: config.flashPath, + gitRepoInitialized, + cleanupAttempted: gitRepoInitialized, + }, + }; + } + } + + private async initializeGitRepository(flashPath: string, tempGitPath: string): Promise { + try { + // Check if git repository already exists + const existingGitPath = join(flashPath, '.git'); + const hasExistingRepo = await this.flashValidationService.validateGitRepository(flashPath); + + if (hasExistingRepo) { + // Copy existing .git directory to temp location + await execa('cp', ['-r', existingGitPath, tempGitPath]); + this.logger.log('Copied existing git repository to temporary location'); + return true; + } + + // Initialize new git repository in temp location + await mkdir(tempGitPath, { recursive: true }); + await execa('git', ['init'], { cwd: tempGitPath }); + + // Create a gitignore file to exclude sensitive files + const gitignorePath = join(tempGitPath, '.gitignore'); + const gitignoreContent = [ + '# Exclude sensitive files', + '*.key', + '*.pem', + '*.p12', + '*.pfx', + 'config/passwd', + 'config/shadow', + 'config/ssh/', + 'config/ssl/', + 'config/wireguard/', + 'config/network.cfg', + 'config/ident.cfg', + ].join('\n'); + + await writeFile(gitignorePath, gitignoreContent); + + // Add all files to the repository + await execa('git', ['add', '.'], { cwd: flashPath }); + await execa( + 'git', + [ + '-c', + 'user.name=Unraid Backup', + '-c', + 'user.email=backup@unraid.net', + 'commit', + '-m', + 'Flash backup snapshot', + ], + { cwd: flashPath } + ); + + // Move .git directory to temp location + await execa('mv', [join(flashPath, '.git'), tempGitPath]); + + this.logger.log('Initialized new git repository for Flash backup'); + return true; + } catch (error: unknown) { + const errorMessage = 
error instanceof Error ? error.message : String(error); + this.logger.warn(`Failed to initialize git repository: ${errorMessage}`); + return false; + } + } + + private async streamFlashBackup( + config: FlashPreprocessConfigInput, + remoteName: string, + remotePath: string, + tempGitPath: string, + includeGit: boolean, + timeout: number + ): Promise<{ bytesTransferred?: number; duration: number; jobId?: string }> { + // Build tar command arguments + const tarArgs = ['cf', '-']; + + // Add flash directory contents (exclude .git-backup-temp to avoid conflicts) + tarArgs.push('--exclude=.git-backup-temp', '-C', config.flashPath, '.'); + + // Add git repository if available + if (includeGit) { + tarArgs.push('-C', dirname(tempGitPath), '.git-backup-temp'); + } + + // Add additional paths if specified + if (config.additionalPaths && config.additionalPaths.length > 0) { + for (const additionalPath of config.additionalPaths) { + try { + await access(additionalPath); + tarArgs.push('-C', dirname(additionalPath), '.'); + } catch (error: unknown) { + this.logger.warn(`Skipping inaccessible additional path: ${additionalPath}`); + } + } + } + + const streamingOptions: StreamingBackupOptions = { + remoteName, + remotePath, + sourceCommand: 'tar', + sourceArgs: tarArgs, + preprocessType: PreprocessType.FLASH, + timeout, + onProgress: (progress) => { + this.logger.debug(`Flash backup streaming progress: ${progress}%`); + }, + onOutput: (data) => { + this.logger.debug(`Flash backup output: ${data.slice(0, 100)}...`); + }, + onError: (error) => { + this.logger.error(`Flash backup error: ${error}`); + }, + }; + + const result = await this.rcloneApiService.startStreamingBackup(streamingOptions); + + if (!result.success) { + throw new Error(result.error || 'Flash backup streaming failed'); + } + + return { + bytesTransferred: result.bytesTransferred, + duration: result.duration, + jobId: result.jobId, + }; + } + + private async cleanupTempGitRepo(tempGitPath: string): Promise { + try 
{ + await execa('rm', ['-rf', tempGitPath]); + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : String(error); + throw new Error(`Temporary git repository cleanup failed: ${errorMessage}`); + } + } +} diff --git a/api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-validation.service.ts b/api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-validation.service.ts new file mode 100644 index 000000000..1e6d43f0c --- /dev/null +++ b/api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-validation.service.ts @@ -0,0 +1,260 @@ +import { Injectable, Logger } from '@nestjs/common'; +import { access, stat } from 'fs/promises'; +import { join } from 'path'; + +import { execa } from 'execa'; + +import { FlashPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; + +export interface FlashValidationResult { + isValid: boolean; + errors: string[]; + warnings: string[]; + metadata: { + flashPathExists?: boolean; + flashPathMounted?: boolean; + gitRepoExists?: boolean; + gitRepoSize?: number | null; + additionalPathsValid?: boolean[]; + totalSize?: number | null; + availableSpace?: number | null; + }; +} + +@Injectable() +export class FlashValidationService { + private readonly logger = new Logger(FlashValidationService.name); + + async validateFlashConfig(config: FlashPreprocessConfigInput): Promise { + const result: FlashValidationResult = { + isValid: true, + errors: [], + warnings: [], + metadata: {}, + }; + + try { + // Validate flash path exists and is accessible + const flashPathValid = await this.validateFlashPath(config.flashPath); + result.metadata.flashPathExists = flashPathValid; + + if (!flashPathValid) { + result.errors.push( + `Flash path '${config.flashPath}' does not exist or is not accessible` + ); + result.isValid = false; + return result; + } + + // Check if flash path is mounted + const isMounted = await 
this.isFlashMounted(config.flashPath); + result.metadata.flashPathMounted = isMounted; + + if (!isMounted) { + result.warnings.push(`Flash path '${config.flashPath}' may not be properly mounted`); + } + + // Validate git repository if includeGitHistory is enabled + if (config.includeGitHistory) { + const gitRepoExists = await this.validateGitRepository(config.flashPath); + result.metadata.gitRepoExists = gitRepoExists; + + if (!gitRepoExists) { + result.warnings.push( + `Git repository not found in '${config.flashPath}'. Git history will be skipped.` + ); + } else { + const gitRepoSize = await this.getGitRepositorySize(config.flashPath); + result.metadata.gitRepoSize = gitRepoSize; + + if (gitRepoSize && gitRepoSize > 100 * 1024 * 1024) { + // 100MB + result.warnings.push( + `Git repository is large (${Math.round(gitRepoSize / 1024 / 1024)}MB). Backup may take longer.` + ); + } + } + } + + // Validate additional paths + if (config.additionalPaths && config.additionalPaths.length > 0) { + const pathValidations = await Promise.all( + config.additionalPaths.map((path) => this.validateAdditionalPath(path)) + ); + result.metadata.additionalPathsValid = pathValidations; + + const invalidPaths = config.additionalPaths.filter( + (_, index) => !pathValidations[index] + ); + if (invalidPaths.length > 0) { + result.warnings.push( + `Some additional paths are not accessible: ${invalidPaths.join(', ')}` + ); + } + } + + // Calculate total backup size + const totalSize = await this.calculateTotalBackupSize(config); + result.metadata.totalSize = totalSize; + + // Check available space + const availableSpace = await this.getAvailableSpace(config.flashPath); + result.metadata.availableSpace = availableSpace; + + if (totalSize && availableSpace && totalSize > availableSpace * 0.8) { + result.warnings.push( + 'Backup size may be close to available space. Monitor disk usage during backup.' + ); + } + } catch (error: unknown) { + const errorMessage = error instanceof Error ? 
error.message : String(error); + result.errors.push(`Validation failed: ${errorMessage}`); + result.isValid = false; + } + + return result; + } + + async validateFlashPath(flashPath: string): Promise { + try { + await access(flashPath); + const stats = await stat(flashPath); + return stats.isDirectory(); + } catch { + return false; + } + } + + async isFlashMounted(flashPath: string): Promise { + try { + // Check if the path is a mount point by comparing device IDs + const pathStat = await stat(flashPath); + const parentStat = await stat(join(flashPath, '..')); + return pathStat.dev !== parentStat.dev; + } catch { + return false; + } + } + + async validateGitRepository(flashPath: string): Promise { + const gitPath = join(flashPath, '.git'); + try { + await access(gitPath); + const stats = await stat(gitPath); + return stats.isDirectory(); + } catch { + return false; + } + } + + async getGitRepositorySize(flashPath: string): Promise { + const gitPath = join(flashPath, '.git'); + try { + const { stdout } = await execa('du', ['-sb', gitPath]); + const size = parseInt(stdout.split('\t')[0], 10); + return isNaN(size) ? null : size; + } catch { + return null; + } + } + + async validateAdditionalPath(path: string): Promise { + try { + await access(path); + return true; + } catch { + return false; + } + } + + async calculateTotalBackupSize(config: FlashPreprocessConfigInput): Promise { + try { + let totalSize = 0; + + // Get flash directory size + const { stdout: flashSize } = await execa('du', ['-sb', config.flashPath]); + totalSize += parseInt(flashSize.split('\t')[0], 10) || 0; + + // Add additional paths if specified + if (config.additionalPaths) { + for (const path of config.additionalPaths) { + try { + const { stdout: pathSize } = await execa('du', ['-sb', path]); + totalSize += parseInt(pathSize.split('\t')[0], 10) || 0; + } catch (error: unknown) { + this.logger.warn( + `Failed to get size for additional path ${path}: ${error instanceof Error ? 
error.message : String(error)}` + ); + } + } + } + + return totalSize; + } catch { + return null; + } + } + + async getAvailableSpace(path: string): Promise { + try { + const { stdout } = await execa('df', ['-B1', path]); + const lines = stdout.split('\n'); + if (lines.length > 1) { + const fields = lines[1].split(/\s+/); + if (fields.length >= 4) { + const available = parseInt(fields[3], 10); + return isNaN(available) ? null : available; + } + } + return null; + } catch { + return null; + } + } + + async checkGitStatus(flashPath: string): Promise<{ + hasUncommittedChanges: boolean; + currentBranch: string | null; + lastCommitHash: string | null; + }> { + const result = { + hasUncommittedChanges: false, + currentBranch: null as string | null, + lastCommitHash: null as string | null, + }; + + try { + // Check for uncommitted changes + const { stdout: statusOutput } = await execa('git', ['status', '--porcelain'], { + cwd: flashPath, + }); + result.hasUncommittedChanges = statusOutput.trim().length > 0; + + // Get current branch + try { + const { stdout: branchOutput } = await execa( + 'git', + ['rev-parse', '--abbrev-ref', 'HEAD'], + { cwd: flashPath } + ); + result.currentBranch = branchOutput.trim(); + } catch { + // Ignore branch detection errors + } + + // Get last commit hash + try { + const { stdout: commitOutput } = await execa('git', ['rev-parse', 'HEAD'], { + cwd: flashPath, + }); + result.lastCommitHash = commitOutput.trim(); + } catch { + // Ignore commit hash detection errors + } + } catch { + // Git commands failed, repository might not be initialized + } + + return result; + } +} diff --git a/api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing-validation.service.ts b/api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing-validation.service.ts new file mode 100644 index 000000000..1b9f3a020 --- /dev/null +++ b/api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing-validation.service.ts @@ -0,0 +1,297 @@ 
+import { BadRequestException, Injectable, Logger } from '@nestjs/common'; +import { existsSync } from 'fs'; +import { access, constants } from 'fs/promises'; + +import { plainToClass } from 'class-transformer'; +import { validate, ValidationError } from 'class-validator'; +import { execa } from 'execa'; + +import { + FlashPreprocessConfigInput, + PreprocessConfigInput, + PreprocessType, + ScriptPreprocessConfigInput, + ZfsPreprocessConfigInput, +} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; + +export interface ValidatedPreprocessConfig { + type: PreprocessType; + config?: { + zfs?: ZfsPreprocessConfigInput; + flash?: FlashPreprocessConfigInput; + script?: ScriptPreprocessConfigInput; + }; + timeout: number; + cleanupOnFailure: boolean; +} + +@Injectable() +export class PreprocessConfigValidationService { + private readonly logger = new Logger(PreprocessConfigValidationService.name); + + async validateAndTransform(input: PreprocessConfigInput): Promise { + const dto = plainToClass(PreprocessConfigInput, input); + const validationErrors = await validate(dto); + + if (validationErrors.length > 0) { + const errorMessages = this.formatValidationErrors(validationErrors); + throw new BadRequestException(`Validation failed: ${errorMessages}`); + } + + const businessErrors = this.validateBusinessRules(dto); + if (businessErrors.length > 0) { + throw new BadRequestException(`Configuration errors: ${businessErrors.join('; ')}`); + } + + await this.validateAsyncRules(dto); + + return this.transformToValidatedConfig(dto); + } + + private formatValidationErrors(errors: ValidationError[]): string { + return errors + .map((error) => { + const constraints = error.constraints || {}; + return Object.values(constraints).join(', '); + }) + .join('; '); + } + + private validateBusinessRules(dto: PreprocessConfigInput): string[] { + const errors: string[] = []; + + if (dto.type !== PreprocessType.NONE) { + if (!dto.zfsConfig && !dto.flashConfig && 
!dto.scriptConfig) { + errors.push('Preprocessing configuration is required when type is not "none"'); + } + } + + if (dto.type === PreprocessType.ZFS && !dto.zfsConfig) { + errors.push('ZFS configuration is required when type is "zfs"'); + } + + if (dto.type === PreprocessType.FLASH && !dto.flashConfig) { + errors.push('Flash configuration is required when type is "flash"'); + } + + if (dto.type === PreprocessType.SCRIPT && !dto.scriptConfig) { + errors.push('Script configuration is required when type is "script"'); + } + + if (dto.type === PreprocessType.ZFS && dto.zfsConfig) { + errors.push(...this.validateZfsConfig(dto.zfsConfig)); + } + + if (dto.type === PreprocessType.FLASH && dto.flashConfig) { + errors.push(...this.validateFlashConfig(dto.flashConfig)); + } + + if (dto.type === PreprocessType.SCRIPT && dto.scriptConfig) { + errors.push(...this.validateScriptConfig(dto.scriptConfig)); + } + + return errors; + } + + private validateZfsConfig(config: ZfsPreprocessConfigInput): string[] { + const errors: string[] = []; + + if (config.poolName.includes('..') || config.poolName.startsWith('/')) { + errors.push('Invalid ZFS pool name format'); + } + + if (config.datasetName.includes('..') || config.datasetName.includes('//')) { + errors.push('Invalid ZFS dataset name format'); + } + + if (config.retainSnapshots && config.retainSnapshots < 1) { + errors.push('Retain snapshots must be at least 1'); + } + + return errors; + } + + private validateFlashConfig(config: FlashPreprocessConfigInput): string[] { + const errors: string[] = []; + + if (!config.flashPath.startsWith('/')) { + errors.push('Flash path must be an absolute path'); + } + + if (config.additionalPaths) { + for (const path of config.additionalPaths) { + if (!path.startsWith('/')) { + errors.push(`Additional path "${path}" must be an absolute path`); + } + } + } + + return errors; + } + + private validateScriptConfig(config: ScriptPreprocessConfigInput): string[] { + const errors: string[] = []; + + if 
(!config.scriptPath.startsWith('/')) { + errors.push('Script path must be an absolute path'); + } + + if (!config.scriptPath.match(/\.(sh|py|pl|js)$/)) { + errors.push('Script must have a valid extension (.sh, .py, .pl, .js)'); + } + + if (!config.outputPath.startsWith('/')) { + errors.push('Output path must be an absolute path'); + } + + if ( + config.scriptArgs?.some((arg) => arg.includes(';') || arg.includes('|') || arg.includes('&')) + ) { + errors.push('Script arguments cannot contain shell operators (;, |, &)'); + } + + if (config.workingDirectory && !config.workingDirectory.startsWith('/')) { + errors.push('Working directory must be an absolute path'); + } + + return errors; + } + + private async validateAsyncRules(dto: PreprocessConfigInput): Promise { + if (dto.type === PreprocessType.ZFS && dto.zfsConfig) { + const poolExists = await this.validateZfsPool(dto.zfsConfig.poolName); + if (!poolExists) { + throw new BadRequestException(`ZFS pool '${dto.zfsConfig.poolName}' does not exist`); + } + + const datasetExists = await this.validateZfsDataset( + dto.zfsConfig.poolName, + dto.zfsConfig.datasetName + ); + if (!datasetExists) { + throw new BadRequestException( + `ZFS dataset '${dto.zfsConfig.poolName}/${dto.zfsConfig.datasetName}' does not exist` + ); + } + } + + if (dto.type === PreprocessType.SCRIPT && dto.scriptConfig) { + const scriptExists = await this.validateScriptExists(dto.scriptConfig.scriptPath); + if (!scriptExists) { + throw new BadRequestException( + `Script '${dto.scriptConfig.scriptPath}' does not exist or is not executable` + ); + } + } + + if (dto.type === PreprocessType.FLASH && dto.flashConfig) { + const flashPathExists = await this.validateFlashPath(dto.flashConfig.flashPath); + if (!flashPathExists) { + throw new BadRequestException( + `Flash path '${dto.flashConfig.flashPath}' does not exist` + ); + } + } + } + + async validateZfsPool(poolName: string): Promise { + try { + const result = await execa('zpool', ['list', '-H', '-o', 
'name'], { timeout: 5000 }); + const pools = result.stdout.split('\n').filter((line) => line.trim()); + return pools.includes(poolName); + } catch (error) { + this.logger.warn(`Failed to check ZFS pool existence: ${error}`); + return false; + } + } + + async validateZfsDataset(poolName: string, datasetName: string): Promise { + try { + const fullDatasetName = `${poolName}/${datasetName}`; + const result = await execa('zfs', ['list', '-H', '-o', 'name', fullDatasetName], { + timeout: 5000, + }); + return result.stdout.trim() === fullDatasetName; + } catch (error) { + this.logger.warn(`Failed to check ZFS dataset existence: ${error}`); + return false; + } + } + + async validateScriptExists(scriptPath: string): Promise { + try { + if (!existsSync(scriptPath)) { + return false; + } + + await access(scriptPath, constants.F_OK | constants.X_OK); + return true; + } catch (error) { + this.logger.warn(`Failed to validate script: ${error}`); + return false; + } + } + + async validateFlashPath(flashPath: string): Promise { + try { + await access(flashPath, constants.F_OK | constants.R_OK); + return true; + } catch (error) { + this.logger.warn(`Failed to validate flash path: ${error}`); + return false; + } + } + + private transformToValidatedConfig(dto: PreprocessConfigInput): ValidatedPreprocessConfig { + const config: ValidatedPreprocessConfig = { + type: dto.type, + timeout: dto.timeout, + cleanupOnFailure: dto.cleanupOnFailure, + }; + + if (dto.type !== PreprocessType.NONE) { + config.config = {}; + + if (dto.zfsConfig) { + config.config.zfs = dto.zfsConfig; + } + + if (dto.flashConfig) { + config.config.flash = dto.flashConfig; + } + + if (dto.scriptConfig) { + config.config.script = dto.scriptConfig; + } + } + + return config; + } + + async validatePreprocessingCapabilities(): Promise<{ + zfsAvailable: boolean; + flashAvailable: boolean; + scriptingAvailable: boolean; + }> { + const [zfsAvailable, flashAvailable] = await Promise.all([ + this.checkZfsAvailability(), + 
this.validateFlashPath('/boot'), + ]); + + return { + zfsAvailable, + flashAvailable, + scriptingAvailable: true, + }; + } + + private async checkZfsAvailability(): Promise { + try { + await execa('which', ['zfs'], { timeout: 2000 }); + await execa('which', ['zpool'], { timeout: 2000 }); + return true; + } catch (error) { + return false; + } + } +} diff --git a/api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.module.ts b/api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.module.ts new file mode 100644 index 000000000..85456bb96 --- /dev/null +++ b/api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.module.ts @@ -0,0 +1,38 @@ +import { forwardRef, Module } from '@nestjs/common'; + +import { FlashPreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/flash-preprocessing.service.js'; +import { FlashValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/flash-validation.service.js'; +import { PreprocessConfigValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing-validation.service.js'; +import { PreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.service.js'; +import { ScriptPreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/script-preprocessing.service.js'; +import { StreamingJobManager } from '@app/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.js'; +import { ZfsPreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/zfs-preprocessing.service.js'; +import { ZfsValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/zfs-validation.service.js'; +import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js'; +import { RCloneModule } from '@app/unraid-api/graph/resolvers/rclone/rclone.module.js'; + +@Module({ + imports: [forwardRef(() => 
RCloneModule)], + providers: [ + RCloneApiService, + PreprocessingService, + PreprocessConfigValidationService, + StreamingJobManager, + ZfsPreprocessingService, + FlashPreprocessingService, + ScriptPreprocessingService, + ZfsValidationService, + FlashValidationService, + ], + exports: [ + PreprocessingService, + PreprocessConfigValidationService, + StreamingJobManager, + ZfsPreprocessingService, + FlashPreprocessingService, + ScriptPreprocessingService, + ZfsValidationService, + FlashValidationService, + ], +}) +export class PreprocessingModule {} diff --git a/api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.service.ts b/api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.service.ts new file mode 100644 index 000000000..9053c1401 --- /dev/null +++ b/api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.service.ts @@ -0,0 +1,441 @@ +import { BadRequestException, Injectable, Logger } from '@nestjs/common'; +import { EventEmitter } from 'events'; +import { existsSync } from 'fs'; +import { unlink } from 'fs/promises'; + +import { v4 as uuidv4 } from 'uuid'; + +import { + PreprocessConfigValidationService, + ValidatedPreprocessConfig, +} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing-validation.service.js'; +import { + PreprocessConfigInput, + PreprocessResult, + PreprocessType, + StreamingJobInfo, +} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; +import { + StreamingJobManager, + StreamingJobOptions, + StreamingJobResult, +} from '@app/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.js'; + +export interface PreprocessingOptions { + jobId?: string; + onProgress?: (progress: number) => void; + onOutput?: (data: string) => void; + onError?: (error: string) => void; +} + +@Injectable() +export class PreprocessingService extends EventEmitter { + private readonly logger = new Logger(PreprocessingService.name); + private 
readonly activePreprocessJobs = new Map(); + + constructor( + private readonly validationService: PreprocessConfigValidationService, + private readonly streamingJobManager: StreamingJobManager + ) { + super(); + this.setupEventListeners(); + } + + async executePreprocessing( + config: PreprocessConfigInput, + options: PreprocessingOptions = {} + ): Promise { + const jobId = options.jobId || uuidv4(); + + try { + this.logger.log(`Starting preprocessing job ${jobId} with type: ${config.type}`); + + const validatedConfig = await this.validationService.validateAndTransform(config); + + if (validatedConfig.type === PreprocessType.NONE) { + return this.createSuccessResult(jobId, { type: 'none' }); + } + + const result = await this.executePreprocessingByType(validatedConfig, jobId, options); + + this.activePreprocessJobs.set(jobId, result); + this.emit('preprocessingCompleted', { jobId, result }); + + return result; + } catch (error) { + const errorMessage = error instanceof Error ? error.message : 'Unknown error'; + this.logger.error(`Preprocessing job ${jobId} failed: ${errorMessage}`, error); + + const result = this.createErrorResult(jobId, errorMessage); + this.activePreprocessJobs.set(jobId, result); + this.emit('preprocessingFailed', { jobId, result, error }); + + if (config.cleanupOnFailure) { + await this.cleanup(jobId); + } + + return result; + } + } + + private async executePreprocessingByType( + config: ValidatedPreprocessConfig, + jobId: string, + options: PreprocessingOptions + ): Promise { + switch (config.type) { + case PreprocessType.ZFS: + return this.executeZfsPreprocessing(config, jobId, options); + + case PreprocessType.FLASH: + return this.executeFlashPreprocessing(config, jobId, options); + + case PreprocessType.SCRIPT: + return this.executeScriptPreprocessing(config, jobId, options); + + default: + throw new BadRequestException(`Unsupported preprocessing type: ${config.type}`); + } + } + + private async executeZfsPreprocessing( + config: 
ValidatedPreprocessConfig, + jobId: string, + options: PreprocessingOptions + ): Promise { + const zfsConfig = config.config?.zfs; + if (!zfsConfig) { + throw new BadRequestException('ZFS configuration is required'); + } + + const snapshotName = `${zfsConfig.snapshotPrefix || 'backup'}-${Date.now()}`; + const datasetPath = `${zfsConfig.poolName}/${zfsConfig.datasetName}`; + const fullSnapshotName = `${datasetPath}@${snapshotName}`; + + try { + const createSnapshotOptions: StreamingJobOptions = { + command: 'zfs', + args: ['snapshot', fullSnapshotName], + timeout: config.timeout * 1000, + onProgress: options.onProgress, + onOutput: options.onOutput, + onError: options.onError, + }; + + const { promise: snapshotPromise } = await this.streamingJobManager.startStreamingJob( + PreprocessType.ZFS, + createSnapshotOptions + ); + + const snapshotResult = await snapshotPromise; + + if (!snapshotResult.success) { + throw new Error(`Failed to create ZFS snapshot: ${snapshotResult.error}`); + } + + this.logger.log(`Created ZFS snapshot: ${fullSnapshotName}`); + + return this.createSuccessResult(jobId, { + type: 'zfs', + snapshotName: fullSnapshotName, + streamPath: fullSnapshotName, + cleanupRequired: zfsConfig.cleanupSnapshots, + metadata: { + poolName: zfsConfig.poolName, + datasetName: zfsConfig.datasetName, + snapshotPrefix: zfsConfig.snapshotPrefix, + retainSnapshots: zfsConfig.retainSnapshots, + }, + }); + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : 'Unknown ZFS error'; + this.logger.error(`ZFS preprocessing failed for job ${jobId}: ${errorMessage}`); + + if (config.cleanupOnFailure) { + await this.cleanupZfsSnapshot(fullSnapshotName); + } + + throw error; + } + } + + private async executeFlashPreprocessing( + config: ValidatedPreprocessConfig, + jobId: string, + options: PreprocessingOptions + ): Promise { + const flashConfig = config.config?.flash; + if (!flashConfig) { + throw new BadRequestException('Flash configuration is required'); + } + + const gitRepoPath = `${flashConfig.flashPath}/.git`; + + try { + if (flashConfig.includeGitHistory && !existsSync(gitRepoPath)) { + const initOptions: StreamingJobOptions = { + command: 'git', + args: ['init'], + cwd: flashConfig.flashPath, + timeout: config.timeout * 1000, + onProgress: options.onProgress, + onOutput: options.onOutput, + onError: options.onError, + }; + + const { promise: initPromise } = await this.streamingJobManager.startStreamingJob( + PreprocessType.FLASH, + initOptions + ); + + const initResult = await initPromise; + + if (!initResult.success) { + throw new Error(`Failed to initialize git repository: ${initResult.error}`); + } + + const addOptions: StreamingJobOptions = { + command: 'git', + args: ['add', '.'], + cwd: flashConfig.flashPath, + timeout: config.timeout * 1000, + onProgress: options.onProgress, + onOutput: options.onOutput, + onError: options.onError, + }; + + const { promise: addPromise } = await this.streamingJobManager.startStreamingJob( + PreprocessType.FLASH, + addOptions + ); + + const addResult = await addPromise; + + if (!addResult.success) { + this.logger.warn(`Git add failed, continuing: ${addResult.error}`); + } + + const commitOptions: StreamingJobOptions = { + command: 'git', + args: ['commit', '-m', `Backup snapshot ${new Date().toISOString()}`], + cwd: flashConfig.flashPath, + timeout: config.timeout * 1000, + onProgress: options.onProgress, + onOutput: options.onOutput, + onError: options.onError, + 
}; + + const { promise: commitPromise } = await this.streamingJobManager.startStreamingJob( + PreprocessType.FLASH, + commitOptions + ); + + const commitResult = await commitPromise; + + if (!commitResult.success) { + this.logger.warn(`Git commit failed, continuing: ${commitResult.error}`); + } + } + + this.logger.log(`Flash preprocessing completed for job ${jobId}`); + + return this.createSuccessResult(jobId, { + type: 'flash', + streamPath: flashConfig.flashPath, + cleanupRequired: false, + metadata: { + flashPath: flashConfig.flashPath, + includeGitHistory: flashConfig.includeGitHistory, + additionalPaths: flashConfig.additionalPaths, + gitInitialized: !existsSync(gitRepoPath), + }, + }); + } catch (error) { + const errorMessage = error instanceof Error ? error.message : 'Unknown Flash error'; + this.logger.error(`Flash preprocessing failed for job ${jobId}: ${errorMessage}`); + throw error; + } + } + + private async executeScriptPreprocessing( + config: ValidatedPreprocessConfig, + jobId: string, + options: PreprocessingOptions + ): Promise { + const scriptConfig = config.config?.script; + if (!scriptConfig) { + throw new BadRequestException('Script configuration is required'); + } + + try { + const scriptOptions: StreamingJobOptions = { + command: scriptConfig.scriptPath, + args: scriptConfig.scriptArgs || [], + cwd: scriptConfig.workingDirectory, + env: scriptConfig.environment, + timeout: config.timeout * 1000, + onProgress: options.onProgress, + onOutput: options.onOutput, + onError: options.onError, + }; + + const { promise: scriptPromise } = await this.streamingJobManager.startStreamingJob( + PreprocessType.SCRIPT, + scriptOptions + ); + + const scriptResult = await scriptPromise; + + if (!scriptResult.success) { + throw new Error(`Script execution failed: ${scriptResult.error}`); + } + + this.logger.log(`Script preprocessing completed for job ${jobId}`); + + return this.createSuccessResult(jobId, { + type: 'script', + outputPath: scriptConfig.outputPath, 
            // NOTE(review): this span begins mid-method — the opening of the
            // script-preprocessing routine (where jobId/config/scriptConfig and
            // scriptResult are bound) is outside this view.
                cleanupRequired: true,
                metadata: {
                    scriptPath: scriptConfig.scriptPath,
                    scriptArgs: scriptConfig.scriptArgs,
                    workingDirectory: scriptConfig.workingDirectory,
                    exitCode: scriptResult.exitCode,
                    duration: scriptResult.duration,
                },
            });
        } catch (error) {
            const errorMessage = error instanceof Error ? error.message : 'Unknown Script error';
            this.logger.error(`Script preprocessing failed for job ${jobId}: ${errorMessage}`);

            // Best-effort removal of a partially-written output file.
            if (config.cleanupOnFailure && existsSync(scriptConfig.outputPath)) {
                await this.cleanupScriptOutput(scriptConfig.outputPath);
            }

            throw error;
        }
    }

    /**
     * Release artifacts created by preprocessing for `jobId` (ZFS snapshot or
     * script output file) and drop the job's cached result.
     *
     * Errors during cleanup are logged but never rethrown, so a failed cleanup
     * cannot break the surrounding backup flow.
     */
    async cleanup(jobId: string): Promise<void> {
        const result = this.activePreprocessJobs.get(jobId);
        if (!result) {
            this.logger.warn(`No preprocessing result found for cleanup of job ${jobId}`);
            return;
        }

        try {
            if (result.cleanupRequired) {
                // NOTE(review): dispatch relies on a 'type' key inside
                // result.metadata — the writer of that key is outside this
                // view; confirm it is always set for zfs/script/flash results.
                switch (result.metadata?.type) {
                    case 'zfs':
                        if (result.snapshotName) {
                            await this.cleanupZfsSnapshot(result.snapshotName);
                        }
                        break;

                    case 'script':
                        if (result.outputPath) {
                            await this.cleanupScriptOutput(result.outputPath);
                        }
                        break;

                    case 'flash':
                        // Flash backups stream directly; nothing persisted to remove.
                        break;
                }
            }

            this.activePreprocessJobs.delete(jobId);
            this.logger.log(`Cleanup completed for preprocessing job ${jobId}`);
        } catch (error) {
            this.logger.error(`Cleanup failed for job ${jobId}:`, error);
        }
    }

    /**
     * Destroy a ZFS snapshot via the streaming-job manager (`zfs destroy`),
     * with a 30s timeout. Failures are logged, not thrown.
     */
    private async cleanupZfsSnapshot(snapshotName: string): Promise<void> {
        try {
            const { promise } = await this.streamingJobManager.startStreamingJob(PreprocessType.ZFS, {
                command: 'zfs',
                args: ['destroy', snapshotName],
                timeout: 30000,
            });

            const result = await promise;

            if (result.success) {
                this.logger.log(`Cleaned up ZFS snapshot: ${snapshotName}`);
            } else {
                this.logger.error(`Failed to cleanup ZFS snapshot ${snapshotName}: ${result.error}`);
            }
        } catch (error) {
            this.logger.error(`Error during ZFS snapshot cleanup: ${error}`);
        }
    }

    /** Delete a script's output file if it still exists; never throws. */
    private async cleanupScriptOutput(outputPath: string): Promise<void> {
        try {
            if (existsSync(outputPath)) {
                await unlink(outputPath);
                this.logger.log(`Cleaned up script output file: ${outputPath}`);
            }
        } catch (error) {
            this.logger.error(`Failed to cleanup script output ${outputPath}: ${error}`);
        }
    }

    /**
     * Build a success result. The metadata is both spread onto the top level
     * (so keys like outputPath/snapshotName surface as PreprocessResult
     * fields) and kept intact under `metadata`.
     * NOTE(review): `jobId` is currently unused; kept for signature symmetry
     * with createErrorResult.
     */
    private createSuccessResult(jobId: string, metadata: Record<string, unknown>): PreprocessResult {
        return {
            success: true,
            ...metadata,
            metadata,
        };
    }

    /** Build a failure result; no cleanup is owed by the caller. `jobId` unused. */
    private createErrorResult(jobId: string, error: string): PreprocessResult {
        return {
            success: false,
            error,
            cleanupRequired: false,
        };
    }

    /** Re-emit streaming-job-manager events under this service's event names. */
    private setupEventListeners(): void {
        this.streamingJobManager.on('jobStarted', (jobInfo: StreamingJobInfo) => {
            this.emit('streamingJobStarted', jobInfo);
        });

        this.streamingJobManager.on(
            'jobProgress',
            ({ jobId, progress }: { jobId: string; progress: number }) => {
                this.emit('preprocessingProgress', { jobId, progress });
            }
        );

        this.streamingJobManager.on(
            'jobCompleted',
            ({ jobInfo, result }: { jobInfo: StreamingJobInfo; result: StreamingJobResult }) => {
                this.emit('streamingJobCompleted', { jobInfo, result });
            }
        );
    }

    /** Snapshot copy of the active-jobs map (callers cannot mutate internal state). */
    getActiveJobs(): Map<string, PreprocessResult> {
        return new Map(this.activePreprocessJobs);
    }

    /** Cached preprocessing result for a job, if any. */
    getJobResult(jobId: string): PreprocessResult | undefined {
        return this.activePreprocessJobs.get(jobId);
    }

    /** Cancel the underlying streaming job; on success also run cleanup. */
    async cancelJob(jobId: string): Promise<boolean> {
        const cancelled = this.streamingJobManager.cancelJob(jobId);
        if (cancelled) {
            await this.cleanup(jobId);
        }
        return cancelled;
    }

    /** Cleanup every tracked job, then let the job manager tear down processes. */
    async cleanupAllJobs(): Promise<void> {
        const jobIds = Array.from(this.activePreprocessJobs.keys());
        await Promise.all(jobIds.map((jobId) => this.cleanup(jobId)));
        await this.streamingJobManager.cleanupAllJobs();
    }
}
import { Field, InputType, ObjectType, registerEnumType } from '@nestjs/graphql';

import { Type } from 'class-transformer';
import {
    IsBoolean,
    IsEnum,
    IsNotEmpty,
    IsNumber,
    IsOptional,
    IsString,
    Min,
    ValidateIf,
    ValidateNested,
} from 'class-validator';
import { GraphQLJSON } from 'graphql-scalars';

/** Preprocessing strategy to run before the actual backup transfer. */
export enum PreprocessType {
    NONE = 'none',
    ZFS = 'zfs',
    FLASH = 'flash',
    SCRIPT = 'script',
}

registerEnumType(PreprocessType, {
    name: 'PreprocessType',
    description: 'Type of preprocessing to perform before backup',
});

/** Input for ZFS snapshot preprocessing (snapshot a dataset, stream it out). */
@InputType()
export class ZfsPreprocessConfigInput {
    @Field(() => String, { description: 'ZFS pool name' })
    @IsString()
    @IsNotEmpty()
    poolName!: string;

    @Field(() => String, { description: 'Dataset name within the pool' })
    @IsString()
    @IsNotEmpty()
    datasetName!: string;

    @Field(() => String, { description: 'Snapshot name prefix', nullable: true })
    @IsOptional()
    @IsString()
    snapshotPrefix?: string;

    @Field(() => Boolean, {
        description: 'Whether to cleanup snapshots after backup',
        defaultValue: true,
    })
    @IsBoolean()
    cleanupSnapshots!: boolean;

    @Field(() => Number, { description: 'Number of snapshots to retain', nullable: true })
    @IsOptional()
    @IsNumber()
    @Min(1)
    retainSnapshots?: number;
}

/** Read-side mirror of ZfsPreprocessConfigInput. */
@ObjectType()
export class ZfsPreprocessConfig {
    @Field(() => String)
    poolName!: string;

    @Field(() => String)
    datasetName!: string;

    @Field(() => String, { nullable: true })
    snapshotPrefix?: string;

    @Field(() => Boolean)
    cleanupSnapshots!: boolean;

    @Field(() => Number, { nullable: true })
    retainSnapshots?: number;
}

/** Input for flash-drive (/boot git repo) backup preprocessing. */
@InputType()
export class FlashPreprocessConfigInput {
    @Field(() => String, { description: 'Flash drive mount path', defaultValue: '/boot' })
    @IsString()
    @IsNotEmpty()
    flashPath!: string;

    @Field(() => Boolean, { description: 'Whether to include git history', defaultValue: true })
    @IsBoolean()
    includeGitHistory!: boolean;

    @Field(() => [String], { description: 'Additional paths to include in backup', nullable: true })
    @IsOptional()
    additionalPaths?: string[];
}

/** Read-side mirror of FlashPreprocessConfigInput. */
@ObjectType()
export class FlashPreprocessConfig {
    @Field(() => String)
    flashPath!: string;

    @Field(() => Boolean)
    includeGitHistory!: boolean;

    @Field(() => [String], { nullable: true })
    additionalPaths?: string[];
}

/** Input for custom-script preprocessing (non-streaming: writes to outputPath). */
@InputType()
export class ScriptPreprocessConfigInput {
    @Field(() => String, { description: 'Path to the script file' })
    @IsString()
    @IsNotEmpty()
    scriptPath!: string;

    @Field(() => [String], { description: 'Arguments to pass to the script', nullable: true })
    @IsOptional()
    scriptArgs?: string[];

    @Field(() => String, { description: 'Working directory for script execution', nullable: true })
    @IsOptional()
    @IsString()
    workingDirectory?: string;

    // NOTE(review): generic argument reconstructed — env vars are presumably
    // string-to-string; confirm against the original source.
    @Field(() => GraphQLJSON, {
        description: 'Environment variables for script execution',
        nullable: true,
    })
    @IsOptional()
    environment?: Record<string, string>;

    @Field(() => String, { description: 'Output file path where script should write data' })
    @IsString()
    @IsNotEmpty()
    outputPath!: string;
}

/** Read-side mirror of ScriptPreprocessConfigInput. */
@ObjectType()
export class ScriptPreprocessConfig {
    @Field(() => String)
    scriptPath!: string;

    @Field(() => [String], { nullable: true })
    scriptArgs?: string[];

    @Field(() => String, { nullable: true })
    workingDirectory?: string;

    @Field(() => GraphQLJSON, { nullable: true })
    environment?: Record<string, string>;

    @Field(() => String)
    outputPath!: string;
}

/**
 * Top-level preprocessing input. Exactly one of the per-type configs is
 * expected; each is validated only when `type` selects it (ValidateIf).
 */
@InputType()
export class PreprocessConfigInput {
    @Field(() => PreprocessType, { description: 'Type of preprocessing to perform' })
    @IsEnum(PreprocessType)
    type!: PreprocessType;

    @Field(() => ZfsPreprocessConfigInput, { nullable: true })
    @IsOptional()
    @ValidateIf((o) => o.type === PreprocessType.ZFS)
    @ValidateNested()
    @Type(() => ZfsPreprocessConfigInput)
    zfsConfig?: ZfsPreprocessConfigInput;

    @Field(() => FlashPreprocessConfigInput, { nullable: true })
    @IsOptional()
    @ValidateIf((o) => o.type === PreprocessType.FLASH)
    @ValidateNested()
    @Type(() => FlashPreprocessConfigInput)
    flashConfig?: FlashPreprocessConfigInput;

    @Field(() => ScriptPreprocessConfigInput, { nullable: true })
    @IsOptional()
    @ValidateIf((o) => o.type === PreprocessType.SCRIPT)
    @ValidateNested()
    @Type(() => ScriptPreprocessConfigInput)
    scriptConfig?: ScriptPreprocessConfigInput;

    @Field(() => Number, { description: 'Timeout for preprocessing in seconds', defaultValue: 3600 })
    @IsNumber()
    @Min(1)
    timeout!: number;

    @Field(() => Boolean, { description: 'Whether to cleanup on failure', defaultValue: true })
    @IsBoolean()
    cleanupOnFailure!: boolean;
}

/** Read-side mirror of PreprocessConfigInput. */
@ObjectType()
export class PreprocessConfig {
    @Field(() => PreprocessType)
    type!: PreprocessType;

    @Field(() => ZfsPreprocessConfig, { nullable: true })
    zfsConfig?: ZfsPreprocessConfig;

    @Field(() => FlashPreprocessConfig, { nullable: true })
    flashConfig?: FlashPreprocessConfig;

    @Field(() => ScriptPreprocessConfig, { nullable: true })
    scriptConfig?: ScriptPreprocessConfig;

    @Field(() => Number)
    timeout!: number;

    @Field(() => Boolean)
    cleanupOnFailure!: boolean;
}

/**
 * Internal (non-GraphQL) outcome of a preprocessing step.
 * - streamPath/outputPath/snapshotName are set per preprocessing type.
 * - cleanupRequired signals that cleanup() must run after the backup.
 */
export interface PreprocessResult {
    success: boolean;
    streamPath?: string;
    outputPath?: string;
    snapshotName?: string;
    error?: string;
    cleanupRequired?: boolean;
    metadata?: Record<string, unknown>;
}

/** Runtime bookkeeping for a streaming preprocessing process. */
export interface StreamingJobInfo {
    jobId: string;
    processId: number;
    startTime: Date;
    type: PreprocessType;
    status: 'running' | 'completed' | 'failed' | 'cancelled';
    progress?: number;
    error?: string;
}
000000000..e83e851d6 --- /dev/null +++ b/api/src/unraid-api/graph/resolvers/backup/preprocessing/script-preprocessing.service.ts @@ -0,0 +1,248 @@ +import { Injectable, Logger } from '@nestjs/common'; +import { promises as fs } from 'fs'; +import { dirname, join } from 'path'; + +import { execa } from 'execa'; + +import { + PreprocessResult, + ScriptPreprocessConfig, +} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; + +@Injectable() +export class ScriptPreprocessingService { + private readonly logger = new Logger(ScriptPreprocessingService.name); + private readonly tempDir = '/tmp/unraid-script-preprocessing'; + private readonly maxOutputSize = 100 * 1024 * 1024; // 100MB limit + + async executeScript(config: ScriptPreprocessConfig): Promise { + const startTime = Date.now(); + + try { + await this.ensureTempDirectory(); + + const { command, args } = this.buildCommand(config); + + this.logger.log(`Executing script: ${command} ${args.join(' ')}`); + + await this.runScriptWithTimeout(command, args, 3600); // Default 1 hour timeout + + const outputSize = await this.getFileSize(config.outputPath); + if (outputSize === 0) { + throw new Error('Script produced no output'); + } + + if (outputSize > this.maxOutputSize) { + throw new Error( + `Script output too large: ${outputSize} bytes (max: ${this.maxOutputSize})` + ); + } + + const duration = Date.now() - startTime; + this.logger.log( + `Script completed successfully in ${duration}ms, output size: ${outputSize} bytes` + ); + + return { + success: true, + outputPath: config.outputPath, + metadata: { + scriptPath: config.scriptPath, + duration, + outputSize, + workingDirectory: config.workingDirectory, + scriptArgs: config.scriptArgs, + }, + }; + } catch (error: unknown) { + const errorMessage = error instanceof Error ? 
error.message : String(error); + this.logger.error(`Script preprocessing failed: ${errorMessage}`); + + // Cleanup output file on failure + try { + await fs.unlink(config.outputPath); + } catch { + // Ignore cleanup errors + } + + return { + success: false, + error: errorMessage, + metadata: { + scriptPath: config.scriptPath, + duration: Date.now() - startTime, + workingDirectory: config.workingDirectory, + scriptArgs: config.scriptArgs, + }, + }; + } + } + + private async ensureTempDirectory(): Promise { + try { + await fs.access(this.tempDir); + } catch { + await fs.mkdir(this.tempDir, { recursive: true, mode: 0o700 }); + } + } + + private buildCommand(config: ScriptPreprocessConfig): { command: string; args: string[] } { + // Sandboxed execution with restricted permissions + const command = 'timeout'; + const args = [ + '3600s', // 1 hour timeout + 'nice', + '-n', + '10', + 'ionice', + '-c', + '3', + 'bash', + '-c', + `cd "${config.workingDirectory || '/tmp'}" && exec "${config.scriptPath}" ${(config.scriptArgs || []).join(' ')}`, + ]; + + return { command, args }; + } + + private async runScriptWithTimeout( + command: string, + args: string[], + timeoutSeconds: number + ): Promise { + try { + await execa(command, args, { + timeout: timeoutSeconds * 1000, + stdio: ['ignore', 'pipe', 'pipe'], + env: { + ...process.env, + PATH: '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin', + }, + uid: 99, // nobody user + gid: 99, // nobody group + }); + } catch (error: any) { + if (error.timedOut) { + throw new Error(`Script timeout after ${timeoutSeconds} seconds`); + } + if (error.signal) { + throw new Error(`Script killed by signal: ${error.signal}`); + } + if (error.exitCode !== undefined && error.exitCode !== 0) { + throw new Error( + `Script exited with code ${error.exitCode}. 
stderr: ${error.stderr || ''}` + ); + } + throw new Error(`Failed to execute script: ${error.message}`); + } + } + + private async getFileSize(filePath: string): Promise { + try { + const stats = await fs.stat(filePath); + return stats.size; + } catch { + return 0; + } + } + + async validateScript(config: ScriptPreprocessConfig): Promise<{ valid: boolean; error?: string }> { + try { + // Check if script exists and is executable + await fs.access(config.scriptPath, fs.constants.F_OK | fs.constants.X_OK); + + // Check script is not in restricted locations + const restrictedPaths = ['/boot', '/mnt/user', '/mnt/disk']; + const isRestricted = restrictedPaths.some((path) => config.scriptPath.startsWith(path)); + + if (isRestricted) { + return { + valid: false, + error: 'Script cannot be located in restricted paths (/boot, /mnt/user, /mnt/disk*)', + }; + } + + // Validate working directory if specified + if (config.workingDirectory) { + try { + await fs.access(config.workingDirectory, fs.constants.F_OK); + } catch { + return { + valid: false, + error: `Working directory does not exist: ${config.workingDirectory}`, + }; + } + } + + // Validate output path directory exists + const outputDir = dirname(config.outputPath); + try { + await fs.access(outputDir, fs.constants.F_OK | fs.constants.W_OK); + } catch { + return { + valid: false, + error: `Output directory does not exist or is not writable: ${outputDir}`, + }; + } + + // Validate script arguments + if (config.scriptArgs) { + for (const arg of config.scriptArgs) { + if (arg.length > 1000) { + return { + valid: false, + error: `Script argument too long (max 1000 characters): ${arg.substring(0, 50)}...`, + }; + } + } + } + + return { valid: true }; + } catch (error: unknown) { + const errorMessage = error instanceof Error ? 
error.message : String(error); + return { + valid: false, + error: `Script validation failed: ${errorMessage}`, + }; + } + } + + async cleanup(outputPath: string): Promise { + await this.cleanupFile(outputPath); + } + + private async cleanupFile(filePath: string): Promise { + try { + await fs.unlink(filePath); + this.logger.debug(`Cleaned up file: ${filePath}`); + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : String(error); + this.logger.warn(`Failed to cleanup file ${filePath}: ${errorMessage}`); + } + } + + async cleanupTempDirectory(): Promise { + try { + const files = await fs.readdir(this.tempDir); + const now = Date.now(); + const maxAge = 24 * 60 * 60 * 1000; // 24 hours + + for (const file of files) { + const filePath = join(this.tempDir, file); + try { + const stats = await fs.stat(filePath); + if (now - stats.mtime.getTime() > maxAge) { + await fs.unlink(filePath); + this.logger.debug(`Cleaned up old temp file: ${filePath}`); + } + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : String(error); + this.logger.warn(`Failed to cleanup old temp file ${filePath}: ${errorMessage}`); + } + } + } catch (error: unknown) { + const errorMessage = error instanceof Error ? 
error.message : String(error); + this.logger.warn(`Failed to cleanup temp directory: ${errorMessage}`); + } + } +} diff --git a/api/src/unraid-api/graph/resolvers/backup/preprocessing/script-validation.service.ts b/api/src/unraid-api/graph/resolvers/backup/preprocessing/script-validation.service.ts new file mode 100644 index 000000000..39fd7b2c2 --- /dev/null +++ b/api/src/unraid-api/graph/resolvers/backup/preprocessing/script-validation.service.ts @@ -0,0 +1,285 @@ +import { Injectable, Logger } from '@nestjs/common'; +import { access, stat } from 'fs/promises'; +import { dirname, isAbsolute, resolve } from 'path'; + +import { ScriptPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; + +export interface ScriptValidationResult { + isValid: boolean; + errors: string[]; + warnings: string[]; + metadata: { + scriptExists?: boolean; + scriptExecutable?: boolean; + workingDirectoryExists?: boolean; + outputDirectoryExists?: boolean; + outputDirectoryWritable?: boolean; + environmentVariablesValid?: boolean; + resolvedScriptPath?: string; + resolvedWorkingDirectory?: string; + resolvedOutputPath?: string; + }; +} + +@Injectable() +export class ScriptValidationService { + private readonly logger = new Logger(ScriptValidationService.name); + + async validateScriptConfig(config: ScriptPreprocessConfigInput): Promise { + const result: ScriptValidationResult = { + isValid: true, + errors: [], + warnings: [], + metadata: {}, + }; + + try { + // Resolve and validate script path + const resolvedScriptPath = this.resolveScriptPath( + config.scriptPath, + config.workingDirectory + ); + result.metadata.resolvedScriptPath = resolvedScriptPath; + + const scriptExists = await this.validateScriptExists(resolvedScriptPath); + result.metadata.scriptExists = scriptExists; + + if (!scriptExists) { + result.errors.push(`Script file '${resolvedScriptPath}' does not exist`); + result.isValid = false; + return result; + } + + // Check if 
            // script is executable (continuation of validateScriptConfig,
            // whose opening lies in the preceding span)
            const scriptExecutable = await this.validateScriptExecutable(resolvedScriptPath);
            result.metadata.scriptExecutable = scriptExecutable;

            if (!scriptExecutable) {
                // Non-executable is a warning, not an error: the sandbox may
                // still invoke it via an interpreter.
                result.warnings.push(`Script file '${resolvedScriptPath}' may not be executable`);
            }

            // Validate working directory
            if (config.workingDirectory) {
                const resolvedWorkingDir = resolve(config.workingDirectory);
                result.metadata.resolvedWorkingDirectory = resolvedWorkingDir;

                const workingDirExists = await this.validateDirectory(resolvedWorkingDir);
                result.metadata.workingDirectoryExists = workingDirExists;

                if (!workingDirExists) {
                    result.errors.push(`Working directory '${resolvedWorkingDir}' does not exist`);
                    result.isValid = false;
                }
            }

            // Validate output path and directory
            const resolvedOutputPath = this.resolveOutputPath(
                config.outputPath,
                config.workingDirectory
            );
            result.metadata.resolvedOutputPath = resolvedOutputPath;

            const outputDirectory = dirname(resolvedOutputPath);
            const outputDirExists = await this.validateDirectory(outputDirectory);
            result.metadata.outputDirectoryExists = outputDirExists;

            if (!outputDirExists) {
                result.errors.push(`Output directory '${outputDirectory}' does not exist`);
                result.isValid = false;
            } else {
                // Check if output directory is writable
                const outputDirWritable = await this.validateDirectoryWritable(outputDirectory);
                result.metadata.outputDirectoryWritable = outputDirWritable;

                if (!outputDirWritable) {
                    result.errors.push(`Output directory '${outputDirectory}' is not writable`);
                    result.isValid = false;
                }
            }

            // Validate environment variables
            if (config.environment) {
                const envValid = this.validateEnvironmentVariables(config.environment);
                result.metadata.environmentVariablesValid = envValid;

                if (!envValid) {
                    result.warnings.push('Some environment variables may contain invalid values');
                }
            }

            // Security validations
            this.performSecurityValidations(config, result);
        } catch (error: unknown) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            result.errors.push(`Validation failed: ${errorMessage}`);
            result.isValid = false;
        }

        return result;
    }

    /** Absolute script path; relative paths resolve against workingDirectory or cwd. */
    private resolveScriptPath(scriptPath: string, workingDirectory?: string): string {
        if (isAbsolute(scriptPath)) {
            return scriptPath;
        }

        const baseDir = workingDirectory || process.cwd();
        return resolve(baseDir, scriptPath);
    }

    /** Absolute output path; same resolution rule as resolveScriptPath. */
    private resolveOutputPath(outputPath: string, workingDirectory?: string): string {
        if (isAbsolute(outputPath)) {
            return outputPath;
        }

        const baseDir = workingDirectory || process.cwd();
        return resolve(baseDir, outputPath);
    }

    /** True iff the path exists and is a regular file. */
    async validateScriptExists(scriptPath: string): Promise<boolean> {
        try {
            await access(scriptPath);
            const stats = await stat(scriptPath);
            return stats.isFile();
        } catch {
            return false;
        }
    }

    /** True iff any execute bit (user/group/other) is set; false on stat failure. */
    async validateScriptExecutable(scriptPath: string): Promise<boolean> {
        try {
            const stats = await stat(scriptPath);
            // Check if file has execute permissions (basic check)
            return (stats.mode & parseInt('111', 8)) !== 0;
        } catch {
            return false;
        }
    }

    /** True iff the path exists and is a directory. */
    async validateDirectory(dirPath: string): Promise<boolean> {
        try {
            await access(dirPath);
            const stats = await stat(dirPath);
            return stats.isDirectory();
        } catch {
            return false;
        }
    }

    /**
     * True iff the owner-write bit is set.
     * NOTE(review): checks mode bits only, not whether *this process* can
     * write (no fs.access W_OK) — intentional per the "basic check" comment.
     */
    async validateDirectoryWritable(dirPath: string): Promise<boolean> {
        try {
            const stats = await stat(dirPath);
            // Check if directory has write permissions (basic check)
            return (stats.mode & parseInt('200', 8)) !== 0;
        } catch {
            return false;
        }
    }

    /**
     * Returns false only when a variable NAME is syntactically invalid;
     * "dangerous" names (PATH, HOME, …) just produce a warning log.
     */
    validateEnvironmentVariables(environment: Record<string, string>): boolean {
        try {
            // Check for potentially dangerous environment variables
            const dangerousVars = ['PATH', 'LD_LIBRARY_PATH', 'HOME', 'USER'];
            const hasDangerousVars = Object.keys(environment).some((key) =>
                dangerousVars.includes(key.toUpperCase())
            );

            if (hasDangerousVars) {
                this.logger.warn('Script environment contains potentially dangerous variables');
            }

            // Check for valid variable names (basic validation)
            const validVarName = /^[A-Za-z_][A-Za-z0-9_]*$/;
            const invalidVars = Object.keys(environment).filter((key) => !validVarName.test(key));

            if (invalidVars.length > 0) {
                this.logger.warn(`Invalid environment variable names: ${invalidVars.join(', ')}`);
                return false;
            }

            return true;
        } catch {
            return false;
        }
    }

    /**
     * Heuristic security checks: warn on system-dir scripts and suspicious
     * args; hard-fail only when the output path targets a system directory.
     */
    private performSecurityValidations(
        config: ScriptPreprocessConfigInput,
        result: ScriptValidationResult
    ): void {
        // Check for potentially dangerous script paths
        const dangerousPaths = ['/bin', '/usr/bin', '/sbin', '/usr/sbin'];
        const scriptInDangerousPath = dangerousPaths.some((path) =>
            result.metadata.resolvedScriptPath?.startsWith(path)
        );

        if (scriptInDangerousPath) {
            result.warnings.push(
                'Script is located in a system directory. Ensure it is safe to execute.'
            );
        }

        // Check for dangerous script arguments
        // NOTE(review): substring matching is a heuristic, trivially bypassed;
        // it only feeds a warning, not a rejection.
        if (config.scriptArgs) {
            const dangerousArgs = config.scriptArgs.filter(
                (arg) =>
                    arg.includes('..') ||
                    arg.includes('rm ') ||
                    arg.includes('sudo ') ||
                    arg.includes('su ')
            );

            if (dangerousArgs.length > 0) {
                result.warnings.push(
                    'Script arguments contain potentially dangerous commands or paths.'
                );
            }
        }

        // Check if output path is in a safe location
        if (result.metadata.resolvedOutputPath) {
            const systemPaths = ['/bin', '/usr', '/etc', '/var', '/sys', '/proc'];
            const outputInSystemPath = systemPaths.some((path) =>
                result.metadata.resolvedOutputPath?.startsWith(path)
            );

            if (outputInSystemPath) {
                result.errors.push('Output path cannot be in system directories for security reasons.');
                result.isValid = false;
            }
        }

        // Validate script file extension for common script types
        if (result.metadata.resolvedScriptPath) {
            const scriptExt = result.metadata.resolvedScriptPath.split('.').pop()?.toLowerCase();
            const allowedExtensions = ['sh', 'bash', 'py', 'pl', 'rb', 'js', 'php'];

            if (scriptExt && !allowedExtensions.includes(scriptExt)) {
                result.warnings.push(
                    `Script extension '.${scriptExt}' is not commonly recognized. Ensure it is executable.`
                );
            }
        }
    }

    /** Stat summary for UI display; all-null when the file is unreadable. */
    async getScriptInfo(scriptPath: string): Promise<{
        size: number | null;
        lastModified: Date | null;
        permissions: string | null;
    }> {
        try {
            const stats = await stat(scriptPath);
            return {
                size: stats.size,
                lastModified: stats.mtime,
                permissions: '0' + (stats.mode & parseInt('777', 8)).toString(8),
            };
        } catch {
            return {
                size: null,
                lastModified: null,
                permissions: null,
            };
        }
    }
}

// ---- next file in patch: streaming-job-manager.service.ts (continues in following span) ----
import { Injectable, Logger } from '@nestjs/common';
import { EventEmitter } from 'events';

import { execa } from 'execa';
import { v4 as uuidv4 } from 'uuid';

import {
    PreprocessType,
    StreamingJobInfo,
} from
'@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';

/** Options for spawning a tracked streaming process. */
export interface StreamingJobOptions {
    command: string;
    args: string[];
    cwd?: string;
    env?: Record<string, string>;
    timeout?: number;
    onProgress?: (progress: number) => void;
    onOutput?: (data: string) => void;
    onError?: (error: string) => void;
}

/** Final outcome of a streaming job. */
export interface StreamingJobResult {
    success: boolean;
    exitCode?: number;
    signal?: string;
    error?: string;
    output?: string;
    duration: number;
}

/**
 * Spawns and tracks long-running preprocessing processes (zfs send, tar, …),
 * emitting jobStarted / jobProgress / jobCompleted / jobCancelled events.
 * The execution promise always RESOLVES (never rejects); failures are encoded
 * in StreamingJobResult.success.
 */
@Injectable()
export class StreamingJobManager extends EventEmitter {
    private readonly logger = new Logger(StreamingJobManager.name);
    // NOTE(review): generic arguments reconstructed from mangled source —
    // confirm the value type of `processes` against the original.
    private readonly activeJobs = new Map<string, StreamingJobInfo>();
    private readonly processes = new Map<string, ReturnType<typeof execa>>();

    /**
     * Register and start a job; returns immediately with the job id and the
     * promise resolving to the final result.
     */
    async startStreamingJob(
        type: PreprocessType,
        options: StreamingJobOptions
    ): Promise<{ jobId: string; promise: Promise<StreamingJobResult> }> {
        const jobId = uuidv4();
        const startTime = new Date();

        const jobInfo: StreamingJobInfo = {
            jobId,
            processId: 0, // filled in once the child process is spawned
            startTime,
            type,
            status: 'running',
            progress: 0,
        };

        this.activeJobs.set(jobId, jobInfo);

        const promise = this.executeStreamingJob(jobId, options);

        this.logger.log(`Started streaming job ${jobId} for ${type}`);
        this.emit('jobStarted', jobInfo);

        return { jobId, promise };
    }

    /** Spawn the process, wire stdout/stderr capture + progress, settle on exit. */
    private async executeStreamingJob(
        jobId: string,
        options: StreamingJobOptions
    ): Promise<StreamingJobResult> {
        const startTime = Date.now();
        let timeoutHandle: NodeJS.Timeout | undefined;

        return new Promise((resolve) => {
            const jobInfo = this.activeJobs.get(jobId);
            if (!jobInfo) {
                resolve({
                    success: false,
                    error: 'Job not found',
                    duration: 0,
                });
                return;
            }

            const childProcess = execa(options.command, options.args, {
                cwd: options.cwd,
                env: { ...process.env, ...options.env },
                stdio: ['pipe', 'pipe', 'pipe'],
                timeout: options.timeout,
                killSignal: 'SIGTERM',
            });

            jobInfo.processId = childProcess.pid || 0;
            this.processes.set(jobId, childProcess);

            let output = '';
            let errorOutput = '';

            // NOTE(review): timeout is enforced twice — execa's `timeout`
            // option above AND this manual timer that calls cancelJob. Either
            // may fire first; confirm the intended cancellation path.
            if (options.timeout) {
                timeoutHandle = setTimeout(() => {
                    this.logger.warn(`Streaming job ${jobId} timed out after ${options.timeout}ms`);
                    this.cancelJob(jobId);
                }, options.timeout);
            }

            childProcess.stdout?.on('data', (data: Buffer) => {
                const chunk = data.toString();
                output += chunk; // accumulated in memory; also kept by execa itself

                if (options.onOutput) {
                    options.onOutput(chunk);
                }

                this.extractProgress(jobId, chunk, options.onProgress);
            });

            childProcess.stderr?.on('data', (data: Buffer) => {
                const chunk = data.toString();
                errorOutput += chunk;

                if (options.onError) {
                    options.onError(chunk);
                }

                this.extractProgress(jobId, chunk, options.onProgress);
            });

            childProcess
                .then((result) => {
                    if (timeoutHandle) {
                        clearTimeout(timeoutHandle);
                    }

                    const duration = Date.now() - startTime;
                    const success = result.exitCode === 0;

                    jobInfo.status = success ? 'completed' : 'failed';
                    if (!success) {
                        jobInfo.error = result.stderr || `Process exited with code ${result.exitCode}`;
                    }

                    this.cleanup(jobId);

                    const jobResult: StreamingJobResult = {
                        success,
                        exitCode: result.exitCode,
                        output: success ? result.stdout : undefined,
                        duration,
                    };

                    this.logger.log(
                        `Streaming job ${jobId} completed: ${success ? 'success' : 'failed'}`
                    );
                    this.emit('jobCompleted', { jobInfo, result: jobResult });

                    resolve(jobResult);
                })
                .catch((error) => {
                    if (timeoutHandle) {
                        clearTimeout(timeoutHandle);
                    }

                    const duration = Date.now() - startTime;
                    jobInfo.status = error.isCanceled ? 'cancelled' : 'failed';
                    jobInfo.error = error.message;

                    this.cleanup(jobId);

                    const jobResult: StreamingJobResult = {
                        success: false,
                        exitCode: error.exitCode,
                        signal: error.signal,
                        error: error.message,
                        duration,
                    };

                    this.logger.error(`Streaming job ${jobId} failed:`, error);
                    this.emit('jobCompleted', { jobInfo, result: jobResult });

                    resolve(jobResult);
                });
        });
    }

    /**
     * Derive a progress percentage from process output.
     * ZFS: parses "NN%" tokens. FLASH: NOTE(review) — counts newlines in the
     * CURRENT chunk only (not cumulative) against a hard-coded 1000-line
     * scale, so the value is a rough heuristic at best.
     */
    private extractProgress(
        jobId: string,
        output: string,
        onProgress?: (progress: number) => void
    ): void {
        const jobInfo = this.activeJobs.get(jobId);
        if (!jobInfo) return;

        let progress = jobInfo.progress || 0;

        if (jobInfo.type === PreprocessType.ZFS) {
            const match = output.match(/(\d+(?:\.\d+)?)%/);
            if (match) {
                progress = parseFloat(match[1]);
            }
        } else if (jobInfo.type === PreprocessType.FLASH) {
            const lines = output.split('\n');
            const totalLines = lines.length;
            if (totalLines > 0) {
                progress = Math.min(100, (totalLines / 1000) * 100);
            }
        }

        if (progress !== jobInfo.progress) {
            jobInfo.progress = progress;
            if (onProgress) {
                onProgress(progress);
            }
            this.emit('jobProgress', { jobId, progress });
        }
    }

    /** SIGTERM the process and mark the job cancelled; false if unknown job. */
    cancelJob(jobId: string): boolean {
        const childProcess = this.processes.get(jobId);
        const jobInfo = this.activeJobs.get(jobId);

        if (!childProcess || !jobInfo) {
            return false;
        }

        try {
            jobInfo.status = 'cancelled';
            childProcess.kill('SIGTERM');

            this.logger.log(`Cancelled streaming job ${jobId}`);
            this.emit('jobCancelled', jobInfo);
            return true;
        } catch (error) {
            this.logger.error(`Failed to cancel streaming job ${jobId}:`, error);
            return false;
        }
    }

    getJobInfo(jobId: string): StreamingJobInfo | undefined {
        return this.activeJobs.get(jobId);
    }

    getAllActiveJobs(): StreamingJobInfo[] {
        return Array.from(this.activeJobs.values());
    }

    getJobsByType(type: PreprocessType): StreamingJobInfo[] {
        return Array.from(this.activeJobs.values()).filter((job) => job.type === type);
    }

    /** Drop bookkeeping for a finished job (called from the settle handlers). */
    private cleanup(jobId: string): void {
        this.processes.delete(jobId);
        this.activeJobs.delete(jobId);
    }

    /** Cancel everything, wait 1s for SIGTERM to take effect, then clear maps. */
    async cleanupAllJobs(): Promise<void> {
        const activeJobIds = Array.from(this.activeJobs.keys());

        for (const jobId of activeJobIds) {
            this.cancelJob(jobId);
        }

        await new Promise((resolve) => setTimeout(resolve, 1000));

        this.processes.clear();
        this.activeJobs.clear();

        this.logger.log('Cleaned up all streaming jobs');
    }

    isJobRunning(jobId: string): boolean {
        const jobInfo = this.activeJobs.get(jobId);
        return jobInfo?.status === 'running';
    }

    getJobCount(): number {
        return this.activeJobs.size;
    }

    getJobCountByType(type: PreprocessType): number {
        return this.getJobsByType(type).length;
    }
}

// ---- next file in patch: zfs-preprocessing.service.ts (continues in following span) ----
import { Injectable, Logger } from '@nestjs/common';

import { execa } from 'execa';

import {
    PreprocessResult,
    PreprocessType,
    ZfsPreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
import {
    StreamingJobManager,
    StreamingJobOptions,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.js';
import { ZfsValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/zfs-validation.service.js';

@Injectable()
export class ZfsPreprocessingService {
    private readonly logger = new Logger(ZfsPreprocessingService.name);

    constructor(
        private readonly streamingJobManager: StreamingJobManager,
        private readonly zfsValidationService: ZfsValidationService
    ) {}

    async executeZfsPreprocessing(
        config: ZfsPreprocessConfigInput,
remotePath: string, + timeout: number = 3600000 + ): Promise { + // Validate configuration first + const validationResult = await this.zfsValidationService.validateZfsConfig(config); + if (!validationResult.isValid) { + return { + success: false, + error: `ZFS configuration validation failed: ${validationResult.errors.join(', ')}`, + metadata: { + validationErrors: validationResult.errors, + validationWarnings: validationResult.warnings, + }, + }; + } + + // Log any warnings + if (validationResult.warnings.length > 0) { + this.logger.warn(`ZFS preprocessing warnings: ${validationResult.warnings.join(', ')}`); + } + + const snapshotName = this.generateSnapshotName(config.datasetName); + const fullSnapshotPath = `${config.poolName}/${config.datasetName}@${snapshotName}`; + + try { + await this.createSnapshot(fullSnapshotPath); + this.logger.log(`Created ZFS snapshot: ${fullSnapshotPath}`); + + const streamingResult = await this.streamSnapshot( + fullSnapshotPath, + remotePath, + config, + timeout + ); + + if (config.cleanupSnapshots) { + await this.cleanupSnapshot(fullSnapshotPath); + this.logger.log(`Cleaned up ZFS snapshot: ${fullSnapshotPath}`); + } + + return { + success: true, + outputPath: streamingResult.remotePath, + snapshotName: fullSnapshotPath, + metadata: { + snapshotName: fullSnapshotPath, + bytesTransferred: streamingResult.bytesTransferred, + duration: streamingResult.duration, + cleanedUp: config.cleanupSnapshots, + validationWarnings: validationResult.warnings, + datasetInfo: validationResult.metadata, + }, + }; + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : String(error); + this.logger.error( + `ZFS preprocessing failed: ${errorMessage}`, + error instanceof Error ? 
error.stack : undefined + ); + + if (config.cleanupSnapshots) { + try { + await this.cleanupSnapshot(fullSnapshotPath); + this.logger.log(`Cleaned up ZFS snapshot after failure: ${fullSnapshotPath}`); + } catch (cleanupError: unknown) { + const cleanupErrorMessage = + cleanupError instanceof Error ? cleanupError.message : String(cleanupError); + this.logger.error(`Failed to cleanup snapshot: ${cleanupErrorMessage}`); + } + } + + return { + success: false, + error: errorMessage, + snapshotName: fullSnapshotPath, + cleanupRequired: config.cleanupSnapshots, + metadata: { + snapshotName: fullSnapshotPath, + cleanupAttempted: config.cleanupSnapshots, + }, + }; + } + } + + private async createSnapshot(snapshotPath: string): Promise { + try { + await execa('zfs', ['snapshot', snapshotPath]); + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : String(error); + throw new Error(`ZFS snapshot creation failed: ${errorMessage}`); + } + } + + private async streamSnapshot( + snapshotPath: string, + remotePath: string, + config: ZfsPreprocessConfigInput, + timeout: number + ): Promise<{ remotePath: string; bytesTransferred: number; duration: number }> { + const zfsSendArgs = ['send', snapshotPath]; + + const streamingOptions: StreamingJobOptions = { + command: 'zfs', + args: zfsSendArgs, + timeout, + onProgress: (progress) => { + this.logger.debug(`ZFS streaming progress: ${progress}%`); + }, + onOutput: (data) => { + this.logger.debug(`ZFS send output: ${data.slice(0, 100)}...`); + }, + onError: (error) => { + this.logger.error(`ZFS send error: ${error}`); + }, + }; + + const { jobId, promise } = await this.streamingJobManager.startStreamingJob( + PreprocessType.ZFS, + streamingOptions + ); + + try { + const result = await promise; + + if (!result.success) { + throw new Error(result.error || 'ZFS streaming failed'); + } + + return { + remotePath, + bytesTransferred: 0, + duration: result.duration, + }; + } catch (error: unknown) { + 
this.streamingJobManager.cancelJob(jobId); + throw error; + } + } + + private async cleanupSnapshot(snapshotPath: string): Promise { + try { + await execa('zfs', ['destroy', snapshotPath]); + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : String(error); + throw new Error(`ZFS snapshot cleanup failed: ${errorMessage}`); + } + } + + private generateSnapshotName(datasetName: string): string { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + return `backup-${datasetName}-${timestamp}`; + } +} diff --git a/api/src/unraid-api/graph/resolvers/backup/preprocessing/zfs-validation.service.ts b/api/src/unraid-api/graph/resolvers/backup/preprocessing/zfs-validation.service.ts new file mode 100644 index 000000000..93a11d46d --- /dev/null +++ b/api/src/unraid-api/graph/resolvers/backup/preprocessing/zfs-validation.service.ts @@ -0,0 +1,244 @@ +import { Injectable, Logger } from '@nestjs/common'; + +import { execa } from 'execa'; + +import { ZfsPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; + +export interface ZfsValidationResult { + isValid: boolean; + errors: string[]; + warnings: string[]; + metadata: { + poolExists?: boolean; + datasetExists?: boolean; + datasetSize?: number; + availableSpace?: number; + mountpoint?: string; + }; +} + +@Injectable() +export class ZfsValidationService { + private readonly logger = new Logger(ZfsValidationService.name); + + async validateZfsConfig(config: ZfsPreprocessConfigInput): Promise { + const result: ZfsValidationResult = { + isValid: true, + errors: [], + warnings: [], + metadata: {}, + }; + + try { + // Validate pool exists + const poolExists = await this.validatePool(config.poolName); + result.metadata.poolExists = poolExists; + + if (!poolExists) { + result.errors.push(`ZFS pool '${config.poolName}' does not exist`); + result.isValid = false; + return result; + } + + // Validate dataset exists + const 
datasetExists = await this.validateDataset(config.poolName, config.datasetName); + result.metadata.datasetExists = datasetExists; + + if (!datasetExists) { + result.errors.push( + `ZFS dataset '${config.poolName}/${config.datasetName}' does not exist` + ); + result.isValid = false; + return result; + } + + // Get dataset information + const datasetInfo = await this.getDatasetInfo(config.poolName, config.datasetName); + result.metadata = { ...result.metadata, ...datasetInfo }; + + // Validate dataset is mounted + if (!datasetInfo.mountpoint || datasetInfo.mountpoint === 'none') { + result.warnings.push( + `Dataset '${config.poolName}/${config.datasetName}' is not mounted` + ); + } + + // Check available space for snapshots + if (datasetInfo.availableSpace && datasetInfo.datasetSize) { + const spaceRatio = datasetInfo.availableSpace / datasetInfo.datasetSize; + if (spaceRatio < 0.1) { + result.warnings.push( + 'Low available space for snapshot creation (less than 10% of dataset size)' + ); + } + } + + // Validate snapshot retention settings + if (config.retainSnapshots && config.retainSnapshots < 1) { + result.errors.push('Retain snapshots must be at least 1'); + result.isValid = false; + } + + // Check for existing snapshots if cleanup is disabled + if (!config.cleanupSnapshots) { + const existingSnapshots = await this.getExistingSnapshots( + config.poolName, + config.datasetName, + config.snapshotPrefix + ); + if (existingSnapshots.length > 10) { + result.warnings.push( + `Found ${existingSnapshots.length} existing snapshots. Consider enabling cleanup.` + ); + } + } + } catch (error: unknown) { + const errorMessage = error instanceof Error ? 
error.message : String(error); + result.errors.push(`Validation failed: ${errorMessage}`); + result.isValid = false; + } + + return result; + } + + async validatePool(poolName: string): Promise { + try { + await execa('zpool', ['list', '-H', '-o', 'name', poolName]); + return true; + } catch { + return false; + } + } + + async validateDataset(poolName: string, datasetName: string): Promise { + const fullPath = `${poolName}/${datasetName}`; + try { + await execa('zfs', ['list', '-H', '-o', 'name', fullPath]); + return true; + } catch { + return false; + } + } + + async getDatasetInfo( + poolName: string, + datasetName: string + ): Promise<{ + datasetSize?: number; + availableSpace?: number; + mountpoint?: string; + }> { + const fullPath = `${poolName}/${datasetName}`; + const result: { datasetSize?: number; availableSpace?: number; mountpoint?: string } = {}; + + try { + // Get dataset size + const { stdout: sizeOutput } = await execa('zfs', [ + 'list', + '-H', + '-p', + '-o', + 'used', + fullPath, + ]); + const size = parseInt(sizeOutput.trim(), 10); + if (!isNaN(size)) { + result.datasetSize = size; + } + } catch (error: unknown) { + this.logger.warn( + `Failed to get dataset size: ${error instanceof Error ? error.message : String(error)}` + ); + } + + try { + // Get available space + const { stdout: availOutput } = await execa('zfs', [ + 'list', + '-H', + '-p', + '-o', + 'avail', + fullPath, + ]); + const avail = parseInt(availOutput.trim(), 10); + if (!isNaN(avail)) { + result.availableSpace = avail; + } + } catch (error: unknown) { + this.logger.warn( + `Failed to get available space: ${error instanceof Error ? error.message : String(error)}` + ); + } + + try { + // Get mountpoint + const { stdout: mountOutput } = await execa('zfs', [ + 'list', + '-H', + '-o', + 'mountpoint', + fullPath, + ]); + result.mountpoint = mountOutput.trim(); + } catch (error: unknown) { + this.logger.warn( + `Failed to get mountpoint: ${error instanceof Error ? 
error.message : String(error)}` + ); + } + + return result; + } + + async getExistingSnapshots( + poolName: string, + datasetName: string, + prefix?: string + ): Promise { + const fullPath = `${poolName}/${datasetName}`; + + try { + const { stdout } = await execa('zfs', [ + 'list', + '-H', + '-t', + 'snapshot', + '-o', + 'name', + '-r', + fullPath, + ]); + const snapshots = stdout.split('\n').filter((line) => line.trim()); + + if (prefix) { + const prefixPattern = `${fullPath}@${prefix}`; + return snapshots.filter((snapshot) => snapshot.startsWith(prefixPattern)); + } + + return snapshots.filter((snapshot) => snapshot.startsWith(`${fullPath}@`)); + } catch { + return []; + } + } + + async getPoolHealth(poolName: string): Promise { + try { + const { stdout } = await execa('zpool', ['list', '-H', '-o', 'health', poolName]); + return stdout.trim(); + } catch { + return null; + } + } + + async canCreateSnapshot(poolName: string, datasetName: string): Promise { + // Check if we have write permissions and the dataset is not readonly + const fullPath = `${poolName}/${datasetName}`; + + try { + const { stdout } = await execa('zfs', ['get', '-H', '-o', 'value', 'readonly', fullPath]); + return stdout.trim() === 'off'; + } catch { + return false; + } + } +} diff --git a/api/src/unraid-api/graph/resolvers/rclone/rclone-api.service.ts b/api/src/unraid-api/graph/resolvers/rclone/rclone-api.service.ts index 4a81ed8ab..c4dc3123d 100644 --- a/api/src/unraid-api/graph/resolvers/rclone/rclone-api.service.ts +++ b/api/src/unraid-api/graph/resolvers/rclone/rclone-api.service.ts @@ -10,8 +10,24 @@ import got, { HTTPError } from 'got'; import pRetry from 'p-retry'; import { sanitizeParams } from '@app/core/log.js'; +import { + BACKUP_JOB_GROUP_PREFIX, + getBackupJobGroupId, + getConfigIdFromGroupId, + isBackupJobGroup, +} from '@app/unraid-api/graph/resolvers/backup/backup.utils.js'; +import { + PreprocessType, + StreamingJobInfo, +} from 
'@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js'; +import { + StreamingJobManager, + StreamingJobOptions, + StreamingJobResult, +} from '@app/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.js'; import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js'; import { + BackupJobStatus, CreateRCloneRemoteDto, DeleteRCloneRemoteDto, GetRCloneJobStatusDto, @@ -20,7 +36,6 @@ import { RCloneJob, RCloneJobListResponse, RCloneJobStats, - RCloneJobStatus, RCloneProviderResponse, RCloneRemoteConfig, RCloneStartBackupInput, @@ -51,6 +66,39 @@ interface JobOperationResult { errors: string[]; } +export interface StreamingBackupOptions { + remoteName: string; + remotePath: string; + sourceStream?: NodeJS.ReadableStream; + sourceCommand?: string; + sourceArgs?: string[]; + preprocessType?: PreprocessType; + onProgress?: (progress: number) => void; + onOutput?: (data: string) => void; + onError?: (error: string) => void; + timeout?: number; +} + +export interface StreamingBackupResult { + success: boolean; + jobId?: string; + rcloneJobId?: string; + error?: string; + duration: number; + bytesTransferred?: number; +} + +export interface UnifiedJobStatus { + jobId: string; + type: 'daemon' | 'streaming'; + status: BackupJobStatus; + progress?: number; + stats?: RCloneJobStats; + error?: string; + startTime?: Date; + preprocessType?: PreprocessType; +} + const CONSTANTS = { RETRY_CONFIG: { retries: 6, @@ -70,8 +118,6 @@ const CONSTANTS = { }, } as const; -const JOB_GROUP_PREFIX = 'backup-'; - @Injectable() export class RCloneApiService implements OnModuleInit, OnModuleDestroy { private isInitialized: boolean = false; @@ -84,7 +130,10 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { private readonly rclonePassword: string = process.env.RCLONE_PASSWORD || crypto.randomBytes(24).toString('base64'); - constructor(private readonly statusService: 
RCloneStatusService) {} + constructor( + private readonly statusService: RCloneStatusService, + private readonly streamingJobManager: StreamingJobManager + ) {} async onModuleInit(): Promise { try { @@ -132,13 +181,23 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { const rcloneArgs = this.buildRcloneArgs(socketPath, logFilePath); this.logger.log(`Starting RClone RC daemon on socket: ${socketPath}`); - this.rcloneProcess = execa('rclone', rcloneArgs, { detached: false }); + const rcloneProcessExecution = execa('rclone', rcloneArgs, { detached: false }); + this.rcloneProcess = rcloneProcessExecution; this.setupProcessListeners(); + rcloneProcessExecution.catch((error) => { + this.logger.debug( + `Rclone process execution promise rejected (expected if process failed to start or exited prematurely): ${ + error.shortMessage || error.message + }` + ); + }); + await this.waitForSocketReady(); + this.logger.log('RClone RC daemon started and socket is ready.'); return true; } catch (error: unknown) { - this.logger.error(`Error starting RClone RC daemon: ${error}`); + this.logger.error(`Error during RClone RC daemon startup sequence: ${error}`); this.cleanupFailedProcess(); return false; } @@ -317,8 +376,8 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { this.logger.log(`Starting backup: ${input.srcPath} → ${input.dstPath}`); const group = input.configId - ? `${JOB_GROUP_PREFIX}${input.configId}` - : JOB_GROUP_PREFIX + 'manual'; + ? 
getBackupJobGroupId(input.configId) + : BACKUP_JOB_GROUP_PREFIX + 'manual'; const params = { srcFs: input.srcPath, @@ -335,6 +394,185 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { return result; } + async startStreamingBackup(options: StreamingBackupOptions): Promise { + const startTime = Date.now(); + + try { + if (!options.sourceCommand || !options.sourceArgs) { + throw new Error('Source command and args are required for streaming backup'); + } + + const remotePath = `${options.remoteName}:${options.remotePath}`; + + const streamingOptions: StreamingJobOptions = { + command: 'sh', + args: [ + '-c', + `${options.sourceCommand} ${options.sourceArgs.join(' ')} | rclone rcat "${remotePath}"`, + ], + timeout: options.timeout, + onProgress: options.onProgress, + onOutput: options.onOutput, + onError: options.onError, + }; + + const { jobId, promise } = await this.streamingJobManager.startStreamingJob( + options.preprocessType || PreprocessType.NONE, + streamingOptions + ); + + const result = await promise; + const duration = Date.now() - startTime; + + return { + success: result.success, + jobId, + error: result.error, + duration, + bytesTransferred: this.extractBytesFromOutput(result.output), + }; + } catch (error: unknown) { + const duration = Date.now() - startTime; + this.logger.error(`Streaming backup failed: ${error}`); + + return { + success: false, + error: error instanceof Error ? 
error.message : String(error), + duration, + }; + } + } + + async getUnifiedJobStatus(jobId: string): Promise { + const streamingJob = this.streamingJobManager.getJobInfo(jobId); + + if (streamingJob) { + return { + jobId: streamingJob.jobId, + type: 'streaming', + status: this.mapStreamingStatusToBackupStatus(streamingJob.status), + progress: streamingJob.progress, + error: streamingJob.error, + startTime: streamingJob.startTime, + preprocessType: streamingJob.type, + }; + } + + try { + const rcloneJob = await this.getEnhancedJobStatus(jobId); + if (rcloneJob) { + return { + jobId: rcloneJob.id, + type: 'daemon', + status: rcloneJob.status || BackupJobStatus.FAILED, + progress: rcloneJob.stats?.percent, + stats: rcloneJob.stats, + error: rcloneJob.error, + }; + } + } catch (error) { + this.logger.warn(`Failed to get RClone job status for ${jobId}: ${error}`); + } + + return null; + } + + async getAllUnifiedJobs(): Promise { + const unifiedJobs: UnifiedJobStatus[] = []; + + const streamingJobs = this.streamingJobManager.getAllActiveJobs(); + for (const job of streamingJobs) { + unifiedJobs.push({ + jobId: job.jobId, + type: 'streaming', + status: this.mapStreamingStatusToBackupStatus(job.status), + progress: job.progress, + error: job.error, + startTime: job.startTime, + preprocessType: job.type, + }); + } + + try { + const rcloneJobs = await this.getAllJobsWithStats(); + for (const job of rcloneJobs) { + unifiedJobs.push({ + jobId: job.id, + type: 'daemon', + status: job.status || BackupJobStatus.FAILED, + progress: job.stats?.percent, + stats: job.stats, + error: job.error, + }); + } + } catch (error) { + this.logger.warn(`Failed to get RClone jobs: ${error}`); + } + + return unifiedJobs; + } + + async stopUnifiedJob(jobId: string): Promise { + if (this.streamingJobManager.isJobRunning(jobId)) { + return this.streamingJobManager.cancelJob(jobId); + } + + try { + const result = await this.stopJob(jobId); + return result.stopped.length > 0; + } catch (error) { + 
this.logger.warn(`Failed to stop RClone job ${jobId}: ${error}`); + return false; + } + } + + private mapStreamingStatusToBackupStatus( + status: 'running' | 'completed' | 'failed' | 'cancelled' + ): BackupJobStatus { + switch (status) { + case 'running': + return BackupJobStatus.RUNNING; + case 'completed': + return BackupJobStatus.COMPLETED; + case 'failed': + return BackupJobStatus.FAILED; + case 'cancelled': + return BackupJobStatus.CANCELLED; + default: + return BackupJobStatus.FAILED; + } + } + + private extractBytesFromOutput(output?: string): number | undefined { + if (!output) return undefined; + + const bytesMatch = output.match(/(\d+)\s*bytes/i); + if (bytesMatch) { + return parseInt(bytesMatch[1], 10); + } + + const sizeMatch = output.match(/(\d+(?:\.\d+)?)\s*(KB|MB|GB|TB)/i); + if (sizeMatch) { + const value = parseFloat(sizeMatch[1]); + const unit = sizeMatch[2].toUpperCase(); + + switch (unit) { + case 'KB': + return Math.round(value * 1024); + case 'MB': + return Math.round(value * 1024 * 1024); + case 'GB': + return Math.round(value * 1024 * 1024 * 1024); + case 'TB': + return Math.round(value * 1024 * 1024 * 1024 * 1024); + default: + return undefined; + } + } + + return undefined; + } + /** * Gets enhanced job status with computed fields */ @@ -342,8 +580,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { try { await validateObject(GetRCloneJobStatusDto, { jobId }); - // If the jobId looks like a group name (starts with backup-), get group stats - if (jobId.startsWith(JOB_GROUP_PREFIX)) { + if (isBackupJobGroup(jobId)) { try { const stats = await this.callRcloneApi('core/stats', { group: jobId }); const enhancedStats = this.statusService.enhanceStatsWithFormattedFields({ @@ -352,10 +589,10 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { }); const job = this.statusService.transformStatsToJob(jobId, enhancedStats); - job.configId = configId || jobId.substring(JOB_GROUP_PREFIX.length); + 
job.configId = configId || getConfigIdFromGroupId(jobId); // Add computed fields - job.isRunning = job.status === RCloneJobStatus.RUNNING; + job.isRunning = job.status === BackupJobStatus.RUNNING; job.errorMessage = job.error || undefined; return job; @@ -373,7 +610,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { const job = this.statusService.transformStatsToJob(jobId, enhancedStats); // Add computed fields - job.isRunning = job.status === RCloneJobStatus.RUNNING; + job.isRunning = job.status === BackupJobStatus.RUNNING; job.errorMessage = job.error || undefined; // Add configId if provided @@ -448,7 +685,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { } const backupGroups = (groupList.groups || []).filter((group: string) => - group.startsWith(JOB_GROUP_PREFIX) + isBackupJobGroup(group) ); if (backupGroups.length === 0) { @@ -478,18 +715,15 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { this.logger.debug(`Processing group ${group}: stats=${JSON.stringify(groupStats)}`); - const configId = group.startsWith(JOB_GROUP_PREFIX) - ? 
group.substring(JOB_GROUP_PREFIX.length) - : undefined; + const extractedConfigId = getConfigIdFromGroupId(group); - // Use the group name as the job ID for consistency, but add group info to stats const enhancedStats = this.statusService.enhanceStatsWithFormattedFields({ ...groupStats, - group, // Add group to stats so it gets picked up in transformStatsToJob + group, }); const job = this.statusService.transformStatsToJob(group, enhancedStats); - job.configId = configId; + job.configId = extractedConfigId; // Only include jobs that are truly active (not completed) const isActivelyTransferring = groupStats.transferring?.length > 0; @@ -527,8 +761,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy { async stopJob(jobId: string): Promise { this.logger.log(`Stopping job: ${jobId}`); - // Check if this is a group name (starts with backup-) or an individual job ID - if (jobId.startsWith(JOB_GROUP_PREFIX)) { + if (isBackupJobGroup(jobId)) { // This is a group, use the stopgroup endpoint return this.executeGroupOperation([jobId], 'stopgroup'); } else { diff --git a/api/src/unraid-api/graph/resolvers/rclone/rclone-status.service.test.ts b/api/src/unraid-api/graph/resolvers/rclone/rclone-status.service.test.ts index c8ef1e600..f09f86fe6 100644 --- a/api/src/unraid-api/graph/resolvers/rclone/rclone-status.service.test.ts +++ b/api/src/unraid-api/graph/resolvers/rclone/rclone-status.service.test.ts @@ -3,7 +3,7 @@ import { Test, TestingModule } from '@nestjs/testing'; import { beforeEach, describe, expect, it, vi } from 'vitest'; import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js'; -import { RCloneJobStats, RCloneJobStatus } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js'; +import { BackupJobStatus, RCloneJobStats } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js'; import { FormatService } from '@app/unraid-api/utils/format.service.js'; // Mock NestJS Logger to suppress logs 
during tests @@ -178,7 +178,7 @@ describe('RCloneStatusService', () => { success: true, error: undefined, progressPercentage: 100, - status: RCloneJobStatus.COMPLETED, + status: BackupJobStatus.COMPLETED, hasRecentJob: true, }); }); @@ -203,7 +203,7 @@ describe('RCloneStatusService', () => { success: true, error: undefined, progressPercentage: 60, - status: RCloneJobStatus.RUNNING, + status: BackupJobStatus.RUNNING, hasRecentJob: true, }); }); @@ -229,7 +229,7 @@ describe('RCloneStatusService', () => { success: false, error: 'Connection timeout', progressPercentage: 0, - status: RCloneJobStatus.ERROR, + status: BackupJobStatus.FAILED, hasRecentJob: true, }); }); @@ -255,7 +255,7 @@ describe('RCloneStatusService', () => { success: false, error: 'context canceled', progressPercentage: 0, - status: RCloneJobStatus.CANCELLED, + status: BackupJobStatus.CANCELLED, hasRecentJob: true, }); }); diff --git a/api/src/unraid-api/graph/resolvers/rclone/rclone-status.service.ts b/api/src/unraid-api/graph/resolvers/rclone/rclone-status.service.ts index 941a2786d..ba33fc337 100644 --- a/api/src/unraid-api/graph/resolvers/rclone/rclone-status.service.ts +++ b/api/src/unraid-api/graph/resolvers/rclone/rclone-status.service.ts @@ -1,10 +1,10 @@ import { Injectable, Logger } from '@nestjs/common'; import { + BackupJobStatus, RCloneJob, RCloneJobListResponse, RCloneJobStats, - RCloneJobStatus, RCloneJobWithStats, } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js'; import { FormatService } from '@app/unraid-api/utils/format.service.js'; @@ -104,7 +104,7 @@ export class RCloneStatusService { this.logger.debug(`Stats for job ${jobId}: %o`, stats); const group = stats.group || undefined; - this.logger.debug(`Processing job ${jobId}: group="${group}", stats: ${JSON.stringify(stats)}`); + this.logger.debug(`Processing job ${jobId}: group="${group}"`); const isFinished = stats.fatalError === false && @@ -115,18 +115,18 @@ export class RCloneStatusService { const isCancelled = 
stats.lastError === 'context canceled'; // Determine status - let status: RCloneJobStatus; + let status: BackupJobStatus; if (hasError) { if (isCancelled) { - status = RCloneJobStatus.CANCELLED; + status = BackupJobStatus.CANCELLED; } else { - status = RCloneJobStatus.ERROR; + status = BackupJobStatus.FAILED; } } else if (isFinished || stats.calculatedPercentage === 100) { - status = RCloneJobStatus.COMPLETED; + status = BackupJobStatus.COMPLETED; } else { - status = RCloneJobStatus.RUNNING; + status = BackupJobStatus.RUNNING; } return { diff --git a/api/src/unraid-api/graph/resolvers/rclone/rclone.model.ts b/api/src/unraid-api/graph/resolvers/rclone/rclone.model.ts index 235c93931..b792aa6f8 100644 --- a/api/src/unraid-api/graph/resolvers/rclone/rclone.model.ts +++ b/api/src/unraid-api/graph/resolvers/rclone/rclone.model.ts @@ -339,8 +339,8 @@ export class RCloneJob { @Field(() => PrefixedID, { description: 'Configuration ID that triggered this job', nullable: true }) configId?: string; - @Field(() => RCloneJobStatus, { description: 'Current status of the job', nullable: true }) - status?: RCloneJobStatus; + @Field(() => BackupJobStatus, { description: 'Current status of the job', nullable: true }) + status?: BackupJobStatus; @Field(() => Boolean, { description: 'Whether the job is finished', nullable: true }) finished?: boolean; @@ -407,14 +407,14 @@ export interface RCloneJobsWithStatsResponse { stats: RCloneJobStats[]; } -export enum RCloneJobStatus { +export enum BackupJobStatus { RUNNING = 'Running', COMPLETED = 'Completed', - ERROR = 'Error', + FAILED = 'Failed', CANCELLED = 'Cancelled', } -registerEnumType(RCloneJobStatus, { - name: 'RCloneJobStatus', - description: 'Status of an RClone job', +registerEnumType(BackupJobStatus, { + name: 'BackupJobStatus', + description: 'Status of a backup job', }); diff --git a/api/src/unraid-api/graph/resolvers/rclone/rclone.module.ts b/api/src/unraid-api/graph/resolvers/rclone/rclone.module.ts index 86cf0e99d..d897925fa 
100644 --- a/api/src/unraid-api/graph/resolvers/rclone/rclone.module.ts +++ b/api/src/unraid-api/graph/resolvers/rclone/rclone.module.ts @@ -1,5 +1,6 @@ -import { Module } from '@nestjs/common'; +import { forwardRef, Module } from '@nestjs/common'; +import { PreprocessingModule } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.module.js'; import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js'; import { RCloneFormService } from '@app/unraid-api/graph/resolvers/rclone/rclone-form.service.js'; import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js'; @@ -9,7 +10,7 @@ import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.ser import { UtilsModule } from '@app/unraid-api/utils/utils.module.js'; @Module({ - imports: [UtilsModule], + imports: [UtilsModule, forwardRef(() => PreprocessingModule)], providers: [ RCloneService, RCloneApiService, diff --git a/api/src/unraid-api/graph/resolvers/rclone/rclone.mutation.resolver.ts b/api/src/unraid-api/graph/resolvers/rclone/rclone.mutation.resolver.ts index 57419ef5b..6f7a955cd 100644 --- a/api/src/unraid-api/graph/resolvers/rclone/rclone.mutation.resolver.ts +++ b/api/src/unraid-api/graph/resolvers/rclone/rclone.mutation.resolver.ts @@ -13,6 +13,7 @@ import { CreateRCloneRemoteInput, DeleteRCloneRemoteInput, RCloneRemote, + RCloneRemoteConfig, } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js'; /** @@ -37,7 +38,7 @@ export class RCloneMutationsResolver { name: input.name, type: input.type, parameters: {}, - config, + config: config as RCloneRemoteConfig, }; } catch (error) { this.logger.error(`Error creating remote: ${error}`); diff --git a/api/src/unraid-api/main.ts b/api/src/unraid-api/main.ts index b5371246b..659485c6a 100644 --- a/api/src/unraid-api/main.ts +++ b/api/src/unraid-api/main.ts @@ -18,7 +18,8 @@ export async function bootstrapNestServer(): Promise { const app = await 
NestFactory.create(AppModule, new FastifyAdapter(), { bufferLogs: false, - ...(LOG_LEVEL !== 'TRACE' ? { logger: false } : {}), + + ...(LOG_LEVEL !== 'DEBUG' ? { logger: false } : {}), }); // Enable validation globally diff --git a/unraid-ui/src/forms/JsonForms.vue b/unraid-ui/src/forms/JsonForms.vue new file mode 100644 index 000000000..f987d2ca2 --- /dev/null +++ b/unraid-ui/src/forms/JsonForms.vue @@ -0,0 +1,60 @@ + + + diff --git a/unraid-ui/src/forms/Select.vue b/unraid-ui/src/forms/Select.vue index 5dfb87471..c8207e5bf 100644 --- a/unraid-ui/src/forms/Select.vue +++ b/unraid-ui/src/forms/Select.vue @@ -43,7 +43,7 @@ const onSelectOpen = () => { diff --git a/web/components/Backup/backup-jobs.query.ts b/web/components/Backup/backup-jobs.query.ts index ff558adba..9a0964c9c 100644 --- a/web/components/Backup/backup-jobs.query.ts +++ b/web/components/Backup/backup-jobs.query.ts @@ -50,15 +50,47 @@ export const RCLONE_JOB_FRAGMENT = graphql(/* GraphQL */ ` } `); +export const PREPROCESS_CONFIG_FRAGMENT = graphql(/* GraphQL */ ` + fragment PreprocessConfig on PreprocessConfig { + type + timeout + cleanupOnFailure + zfsConfig { + poolName + datasetName + snapshotPrefix + cleanupSnapshots + retainSnapshots + } + flashConfig { + flashPath + includeGitHistory + additionalPaths + } + scriptConfig { + scriptPath + scriptArgs + workingDirectory + environment + outputPath + } + } +`); + export const BACKUP_JOB_CONFIG_FRAGMENT = graphql(/* GraphQL */ ` fragment BackupJobConfig on BackupJobConfig { id name + backupMode sourcePath remoteName destinationPath schedule enabled + rcloneOptions + preprocessConfig { + ...PreprocessConfig + } createdAt updatedAt lastRunAt @@ -178,7 +210,6 @@ export const TRIGGER_BACKUP_JOB_MUTATION = graphql(/* GraphQL */ ` mutation TriggerBackupJob($id: PrefixedID!) 
{ backup { triggerJob(id: $id) { - status jobId } } diff --git a/web/composables/gql/gql.ts b/web/composables/gql/gql.ts index 0d7d26744..f18f99feb 100644 --- a/web/composables/gql/gql.ts +++ b/web/composables/gql/gql.ts @@ -22,7 +22,8 @@ type Documents = { "\n query ApiKeyMeta {\n apiKeyPossibleRoles\n apiKeyPossiblePermissions {\n resource\n actions\n }\n }\n": typeof types.ApiKeyMetaDocument, "\n fragment BackupStats on RCloneJobStats {\n bytes\n speed\n eta\n elapsedTime\n percentage\n checks\n deletes\n errors\n fatalError\n lastError\n renames\n retryError\n serverSideCopies\n serverSideCopyBytes\n serverSideMoves\n serverSideMoveBytes\n totalBytes\n totalChecks\n totalTransfers\n transferTime\n transfers\n transferring\n checking\n formattedBytes\n formattedSpeed\n formattedElapsedTime\n formattedEta\n calculatedPercentage\n isActivelyRunning\n isCompleted\n }\n": typeof types.BackupStatsFragmentDoc, "\n fragment RCloneJob on RCloneJob {\n id\n group\n configId\n finished\n success\n error\n status\n stats {\n ...BackupStats\n }\n }\n": typeof types.RCloneJobFragmentDoc, - "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n": typeof types.BackupJobConfigFragmentDoc, + "\n fragment PreprocessConfig on PreprocessConfig {\n type\n timeout\n cleanupOnFailure\n zfsConfig {\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n flashConfig {\n flashPath\n includeGitHistory\n additionalPaths\n }\n scriptConfig {\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n }\n": typeof types.PreprocessConfigFragmentDoc, + "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n backupMode\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n rcloneOptions\n preprocessConfig {\n ...PreprocessConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n 
currentJobId\n }\n": typeof types.BackupJobConfigFragmentDoc, "\n fragment BackupJobConfigWithCurrentJob on BackupJobConfig {\n ...BackupJobConfig\n currentJob {\n ...RCloneJob\n }\n }\n": typeof types.BackupJobConfigWithCurrentJobFragmentDoc, "\n query BackupJobs {\n backup {\n id\n jobs {\n ...RCloneJob\n }\n }\n }\n": typeof types.BackupJobsDocument, "\n query BackupJob($id: PrefixedID!) {\n backupJob(id: $id) {\n ...RCloneJob\n }\n }\n": typeof types.BackupJobDocument, @@ -34,7 +35,7 @@ type Documents = { "\n mutation UpdateBackupJobConfig($id: PrefixedID!, $input: UpdateBackupJobConfigInput!) {\n backup {\n updateBackupJobConfig(id: $id, input: $input) {\n ...BackupJobConfig\n }\n }\n }\n": typeof types.UpdateBackupJobConfigDocument, "\n mutation DeleteBackupJobConfig($id: PrefixedID!) {\n backup {\n deleteBackupJobConfig(id: $id)\n }\n }\n": typeof types.DeleteBackupJobConfigDocument, "\n mutation ToggleBackupJobConfig($id: PrefixedID!) {\n backup {\n toggleJobConfig(id: $id) {\n ...BackupJobConfig\n }\n }\n }\n": typeof types.ToggleBackupJobConfigDocument, - "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n status\n jobId\n }\n }\n }\n": typeof types.TriggerBackupJobDocument, + "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n": typeof types.TriggerBackupJobDocument, "\n mutation StopBackupJob($id: PrefixedID!) {\n backup {\n stopBackupJob(id: $id) {\n status\n jobId\n }\n }\n }\n": typeof types.StopBackupJobDocument, "\n mutation InitiateBackup($input: InitiateBackupInput!) {\n backup {\n initiateBackup(input: $input) {\n status\n jobId\n }\n }\n }\n": typeof types.InitiateBackupDocument, "\n subscription BackupJobProgress($id: PrefixedID!) 
{\n backupJobProgress(id: $id) {\n id\n stats {\n ...BackupStats\n }\n }\n }\n": typeof types.BackupJobProgressDocument, @@ -77,7 +78,8 @@ const documents: Documents = { "\n query ApiKeyMeta {\n apiKeyPossibleRoles\n apiKeyPossiblePermissions {\n resource\n actions\n }\n }\n": types.ApiKeyMetaDocument, "\n fragment BackupStats on RCloneJobStats {\n bytes\n speed\n eta\n elapsedTime\n percentage\n checks\n deletes\n errors\n fatalError\n lastError\n renames\n retryError\n serverSideCopies\n serverSideCopyBytes\n serverSideMoves\n serverSideMoveBytes\n totalBytes\n totalChecks\n totalTransfers\n transferTime\n transfers\n transferring\n checking\n formattedBytes\n formattedSpeed\n formattedElapsedTime\n formattedEta\n calculatedPercentage\n isActivelyRunning\n isCompleted\n }\n": types.BackupStatsFragmentDoc, "\n fragment RCloneJob on RCloneJob {\n id\n group\n configId\n finished\n success\n error\n status\n stats {\n ...BackupStats\n }\n }\n": types.RCloneJobFragmentDoc, - "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n": types.BackupJobConfigFragmentDoc, + "\n fragment PreprocessConfig on PreprocessConfig {\n type\n timeout\n cleanupOnFailure\n zfsConfig {\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n flashConfig {\n flashPath\n includeGitHistory\n additionalPaths\n }\n scriptConfig {\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n }\n": types.PreprocessConfigFragmentDoc, + "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n backupMode\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n rcloneOptions\n preprocessConfig {\n ...PreprocessConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n": types.BackupJobConfigFragmentDoc, "\n fragment BackupJobConfigWithCurrentJob on BackupJobConfig {\n 
...BackupJobConfig\n currentJob {\n ...RCloneJob\n }\n }\n": types.BackupJobConfigWithCurrentJobFragmentDoc, "\n query BackupJobs {\n backup {\n id\n jobs {\n ...RCloneJob\n }\n }\n }\n": types.BackupJobsDocument, "\n query BackupJob($id: PrefixedID!) {\n backupJob(id: $id) {\n ...RCloneJob\n }\n }\n": types.BackupJobDocument, @@ -89,7 +91,7 @@ const documents: Documents = { "\n mutation UpdateBackupJobConfig($id: PrefixedID!, $input: UpdateBackupJobConfigInput!) {\n backup {\n updateBackupJobConfig(id: $id, input: $input) {\n ...BackupJobConfig\n }\n }\n }\n": types.UpdateBackupJobConfigDocument, "\n mutation DeleteBackupJobConfig($id: PrefixedID!) {\n backup {\n deleteBackupJobConfig(id: $id)\n }\n }\n": types.DeleteBackupJobConfigDocument, "\n mutation ToggleBackupJobConfig($id: PrefixedID!) {\n backup {\n toggleJobConfig(id: $id) {\n ...BackupJobConfig\n }\n }\n }\n": types.ToggleBackupJobConfigDocument, - "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n status\n jobId\n }\n }\n }\n": types.TriggerBackupJobDocument, + "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n": types.TriggerBackupJobDocument, "\n mutation StopBackupJob($id: PrefixedID!) {\n backup {\n stopBackupJob(id: $id) {\n status\n jobId\n }\n }\n }\n": types.StopBackupJobDocument, "\n mutation InitiateBackup($input: InitiateBackupInput!) {\n backup {\n initiateBackup(input: $input) {\n status\n jobId\n }\n }\n }\n": types.InitiateBackupDocument, "\n subscription BackupJobProgress($id: PrefixedID!) {\n backupJobProgress(id: $id) {\n id\n stats {\n ...BackupStats\n }\n }\n }\n": types.BackupJobProgressDocument, @@ -173,7 +175,11 @@ export function graphql(source: "\n fragment RCloneJob on RCloneJob {\n id\n /** * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. 
*/ -export function graphql(source: "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n"): (typeof documents)["\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n"]; +export function graphql(source: "\n fragment PreprocessConfig on PreprocessConfig {\n type\n timeout\n cleanupOnFailure\n zfsConfig {\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n flashConfig {\n flashPath\n includeGitHistory\n additionalPaths\n }\n scriptConfig {\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n }\n"): (typeof documents)["\n fragment PreprocessConfig on PreprocessConfig {\n type\n timeout\n cleanupOnFailure\n zfsConfig {\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n flashConfig {\n flashPath\n includeGitHistory\n additionalPaths\n }\n scriptConfig {\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n }\n"]; +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. 
+ */ +export function graphql(source: "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n backupMode\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n rcloneOptions\n preprocessConfig {\n ...PreprocessConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n"): (typeof documents)["\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n backupMode\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n rcloneOptions\n preprocessConfig {\n ...PreprocessConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n"]; /** * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. */ @@ -221,7 +227,7 @@ export function graphql(source: "\n mutation ToggleBackupJobConfig($id: Prefixe /** * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. */ -export function graphql(source: "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n status\n jobId\n }\n }\n }\n"): (typeof documents)["\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n status\n jobId\n }\n }\n }\n"]; +export function graphql(source: "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n"): (typeof documents)["\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n"]; /** * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. 
*/ diff --git a/web/composables/gql/graphql.ts b/web/composables/gql/graphql.ts index c648f7a7e..d7ff46a78 100644 --- a/web/composables/gql/graphql.ts +++ b/web/composables/gql/graphql.ts @@ -384,6 +384,7 @@ export type Backup = Node & { export type BackupJobConfig = Node & { __typename?: 'BackupJobConfig'; + backupMode: BackupMode; /** When this config was created */ createdAt: Scalars['DateTime']['output']; /** Current running job for this config */ @@ -401,6 +402,8 @@ export type BackupJobConfig = Node & { lastRunStatus?: Maybe; /** Human-readable name for this backup job */ name: Scalars['String']['output']; + /** Preprocessing configuration for this backup job */ + preprocessConfig?: Maybe; /** RClone options (e.g., --transfers, --checkers) */ rcloneOptions?: Maybe; /** Remote name from rclone config */ @@ -424,6 +427,20 @@ export type BackupJobConfigFormInput = { showAdvanced?: Scalars['Boolean']['input']; }; +/** Status of a backup job */ +export enum BackupJobStatus { + CANCELLED = 'CANCELLED', + COMPLETED = 'COMPLETED', + FAILED = 'FAILED', + RUNNING = 'RUNNING' +} + +/** The mode of backup to perform (Raw file backup or Preprocessing-based). 
*/ +export enum BackupMode { + PREPROCESSING = 'PREPROCESSING', + RAW = 'RAW' +} + /** Backup related mutations */ export type BackupMutations = { __typename?: 'BackupMutations'; @@ -651,9 +668,12 @@ export type CreateApiKeyInput = { }; export type CreateBackupJobConfigInput = { + backupMode?: BackupMode; destinationPath: Scalars['String']['input']; enabled?: Scalars['Boolean']['input']; name: Scalars['String']['input']; + /** Preprocessing configuration for this backup job */ + preprocessConfig?: InputMaybe; rcloneOptions?: InputMaybe; remoteName: Scalars['String']['input']; schedule: Scalars['String']['input']; @@ -895,6 +915,22 @@ export type Flash = Node & { vendor: Scalars['String']['output']; }; +export type FlashPreprocessConfig = { + __typename?: 'FlashPreprocessConfig'; + additionalPaths?: Maybe>; + flashPath: Scalars['String']['output']; + includeGitHistory: Scalars['Boolean']['output']; +}; + +export type FlashPreprocessConfigInput = { + /** Additional paths to include in backup */ + additionalPaths?: InputMaybe>; + /** Flash drive mount path */ + flashPath?: Scalars['String']['input']; + /** Whether to include git history */ + includeGitHistory?: Scalars['Boolean']['input']; +}; + export type Gpu = Node & { __typename?: 'Gpu'; blacklisted: Scalars['Boolean']['output']; @@ -1304,6 +1340,36 @@ export type Permission = { resource: Resource; }; +export type PreprocessConfig = { + __typename?: 'PreprocessConfig'; + cleanupOnFailure: Scalars['Boolean']['output']; + flashConfig?: Maybe; + scriptConfig?: Maybe; + timeout: Scalars['Float']['output']; + type: PreprocessType; + zfsConfig?: Maybe; +}; + +export type PreprocessConfigInput = { + /** Whether to cleanup on failure */ + cleanupOnFailure?: Scalars['Boolean']['input']; + flashConfig?: InputMaybe; + scriptConfig?: InputMaybe; + /** Timeout for preprocessing in seconds */ + timeout?: Scalars['Float']['input']; + /** Type of preprocessing to perform */ + type: PreprocessType; + zfsConfig?: InputMaybe; +}; + 
+/** Type of preprocessing to perform before backup */ +export enum PreprocessType { + FLASH = 'FLASH', + NONE = 'NONE', + SCRIPT = 'SCRIPT', + ZFS = 'ZFS' +} + export type ProfileModel = Node & { __typename?: 'ProfileModel'; avatar: Scalars['String']['output']; @@ -1463,7 +1529,7 @@ export type RCloneJob = { /** Job status and statistics */ stats?: Maybe; /** Current status of the job */ - status?: Maybe; + status?: Maybe; /** Whether the job was successful */ success?: Maybe; }; @@ -1532,14 +1598,6 @@ export type RCloneJobStats = { transfers?: Maybe; }; -/** Status of an RClone job */ -export enum RCloneJobStatus { - CANCELLED = 'CANCELLED', - COMPLETED = 'COMPLETED', - ERROR = 'ERROR', - RUNNING = 'RUNNING' -} - /** RClone related mutations */ export type RCloneMutations = { __typename?: 'RCloneMutations'; @@ -1672,6 +1730,28 @@ export enum Role { GUEST = 'GUEST' } +export type ScriptPreprocessConfig = { + __typename?: 'ScriptPreprocessConfig'; + environment?: Maybe; + outputPath: Scalars['String']['output']; + scriptArgs?: Maybe>; + scriptPath: Scalars['String']['output']; + workingDirectory?: Maybe; +}; + +export type ScriptPreprocessConfigInput = { + /** Environment variables for script execution */ + environment?: InputMaybe; + /** Output file path where script should write data */ + outputPath: Scalars['String']['input']; + /** Arguments to pass to the script */ + scriptArgs?: InputMaybe>; + /** Path to the script file */ + scriptPath: Scalars['String']['input']; + /** Working directory for script execution */ + workingDirectory?: InputMaybe; +}; + export type Server = Node & { __typename?: 'Server'; apikey: Scalars['String']['output']; @@ -1841,10 +1921,14 @@ export type UnraidArray = Node & { }; export type UpdateBackupJobConfigInput = { + currentJobId?: InputMaybe; destinationPath?: InputMaybe; enabled?: InputMaybe; + lastRunAt?: InputMaybe; lastRunStatus?: InputMaybe; name?: InputMaybe; + /** Preprocessing configuration for this backup job */ + 
preprocessConfig?: InputMaybe; rcloneOptions?: InputMaybe; remoteName?: InputMaybe; schedule?: InputMaybe; @@ -2168,6 +2252,28 @@ export enum WanForwardType { UPNP = 'UPNP' } +export type ZfsPreprocessConfig = { + __typename?: 'ZfsPreprocessConfig'; + cleanupSnapshots: Scalars['Boolean']['output']; + datasetName: Scalars['String']['output']; + poolName: Scalars['String']['output']; + retainSnapshots?: Maybe; + snapshotPrefix?: Maybe; +}; + +export type ZfsPreprocessConfigInput = { + /** Whether to cleanup snapshots after backup */ + cleanupSnapshots?: Scalars['Boolean']['input']; + /** Dataset name within the pool */ + datasetName: Scalars['String']['input']; + /** ZFS pool name */ + poolName: Scalars['String']['input']; + /** Number of snapshots to retain */ + retainSnapshots?: InputMaybe; + /** Snapshot name prefix */ + snapshotPrefix?: InputMaybe; +}; + export enum RegistrationType { BASIC = 'BASIC', INVALID = 'INVALID', @@ -2215,12 +2321,17 @@ export type ApiKeyMetaQuery = { __typename?: 'Query', apiKeyPossibleRoles: Array export type BackupStatsFragment = { __typename?: 'RCloneJobStats', bytes?: number | null, speed?: number | null, eta?: number | null, elapsedTime?: number | null, percentage?: number | null, checks?: number | null, deletes?: number | null, errors?: number | null, fatalError?: boolean | null, lastError?: string | null, renames?: number | null, retryError?: boolean | null, serverSideCopies?: number | null, serverSideCopyBytes?: number | null, serverSideMoves?: number | null, serverSideMoveBytes?: number | null, totalBytes?: number | null, totalChecks?: number | null, totalTransfers?: number | null, transferTime?: number | null, transfers?: number | null, transferring?: any | null, checking?: any | null, formattedBytes?: string | null, formattedSpeed?: string | null, formattedElapsedTime?: string | null, formattedEta?: string | null, calculatedPercentage?: number | null, isActivelyRunning?: boolean | null, isCompleted?: boolean | null } & { ' 
$fragmentName'?: 'BackupStatsFragment' }; -export type RCloneJobFragment = { __typename?: 'RCloneJob', id: string, group?: string | null, configId?: string | null, finished?: boolean | null, success?: boolean | null, error?: string | null, status?: RCloneJobStatus | null, stats?: ( +export type RCloneJobFragment = { __typename?: 'RCloneJob', id: string, group?: string | null, configId?: string | null, finished?: boolean | null, success?: boolean | null, error?: string | null, status?: BackupJobStatus | null, stats?: ( { __typename?: 'RCloneJobStats' } & { ' $fragmentRefs'?: { 'BackupStatsFragment': BackupStatsFragment } } ) | null } & { ' $fragmentName'?: 'RCloneJobFragment' }; -export type BackupJobConfigFragment = { __typename?: 'BackupJobConfig', id: string, name: string, sourcePath: string, remoteName: string, destinationPath: string, schedule: string, enabled: boolean, createdAt: string, updatedAt: string, lastRunAt?: string | null, lastRunStatus?: string | null, currentJobId?: string | null } & { ' $fragmentName'?: 'BackupJobConfigFragment' }; +export type PreprocessConfigFragment = { __typename?: 'PreprocessConfig', type: PreprocessType, timeout: number, cleanupOnFailure: boolean, zfsConfig?: { __typename?: 'ZfsPreprocessConfig', poolName: string, datasetName: string, snapshotPrefix?: string | null, cleanupSnapshots: boolean, retainSnapshots?: number | null } | null, flashConfig?: { __typename?: 'FlashPreprocessConfig', flashPath: string, includeGitHistory: boolean, additionalPaths?: Array | null } | null, scriptConfig?: { __typename?: 'ScriptPreprocessConfig', scriptPath: string, scriptArgs?: Array | null, workingDirectory?: string | null, environment?: any | null, outputPath: string } | null } & { ' $fragmentName'?: 'PreprocessConfigFragment' }; + +export type BackupJobConfigFragment = { __typename?: 'BackupJobConfig', id: string, name: string, backupMode: BackupMode, sourcePath: string, remoteName: string, destinationPath: string, schedule: string, 
enabled: boolean, rcloneOptions?: any | null, createdAt: string, updatedAt: string, lastRunAt?: string | null, lastRunStatus?: string | null, currentJobId?: string | null, preprocessConfig?: ( + { __typename?: 'PreprocessConfig' } + & { ' $fragmentRefs'?: { 'PreprocessConfigFragment': PreprocessConfigFragment } } + ) | null } & { ' $fragmentName'?: 'BackupJobConfigFragment' }; export type BackupJobConfigWithCurrentJobFragment = ( { __typename?: 'BackupJobConfig', currentJob?: ( @@ -2321,7 +2432,7 @@ export type TriggerBackupJobMutationVariables = Exact<{ }>; -export type TriggerBackupJobMutation = { __typename?: 'Mutation', backup: { __typename?: 'BackupMutations', triggerJob: { __typename?: 'BackupStatus', status: string, jobId?: string | null } } }; +export type TriggerBackupJobMutation = { __typename?: 'Mutation', backup: { __typename?: 'BackupMutations', triggerJob: { __typename?: 'BackupStatus', jobId?: string | null } } }; export type StopBackupJobMutationVariables = Exact<{ id: Scalars['PrefixedID']['input']; @@ -2534,10 +2645,11 @@ export type SetupRemoteAccessMutationVariables = Exact<{ export type SetupRemoteAccessMutation = { __typename?: 'Mutation', setupRemoteAccess: boolean }; -export const BackupJobConfigFragmentDoc = 
{"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}}]} as unknown as DocumentNode; +export const PreprocessConfigFragmentDoc = 
{"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"PreprocessConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"PreprocessConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"timeout"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupOnFailure"}},{"kind":"Field","name":{"kind":"Name","value":"zfsConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"poolName"}},{"kind":"Field","name":{"kind":"Name","value":"datasetName"}},{"kind":"Field","name":{"kind":"Name","value":"snapshotPrefix"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupSnapshots"}},{"kind":"Field","name":{"kind":"Name","value":"retainSnapshots"}}]}},{"kind":"Field","name":{"kind":"Name","value":"flashConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"flashPath"}},{"kind":"Field","name":{"kind":"Name","value":"includeGitHistory"}},{"kind":"Field","name":{"kind":"Name","value":"additionalPaths"}}]}},{"kind":"Field","name":{"kind":"Name","value":"scriptConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"scriptPath"}},{"kind":"Field","name":{"kind":"Name","value":"scriptArgs"}},{"kind":"Field","name":{"kind":"Name","value":"workingDirectory"}},{"kind":"Field","name":{"kind":"Name","value":"environment"}},{"kind":"Field","name":{"kind":"Name","value":"outputPath"}}]}}]}}]} as unknown as DocumentNode; +export const BackupJobConfigFragmentDoc = 
{"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"backupMode"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"rcloneOptions"}},{"kind":"Field","name":{"kind":"Name","value":"preprocessConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"PreprocessConfig"}}]}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"PreprocessConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"PreprocessConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"timeout"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupOnFailure"}},{"kind":"Field","name":{"kind":"Name","value":"zfsConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"poolName"}},{"kind":"Field","name":{"kind":"Name","value":"datasetName"}},{"kind":"Field","name":{"kind":"Name","value":"snapshotPrefix"}},{"kind":"F
ield","name":{"kind":"Name","value":"cleanupSnapshots"}},{"kind":"Field","name":{"kind":"Name","value":"retainSnapshots"}}]}},{"kind":"Field","name":{"kind":"Name","value":"flashConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"flashPath"}},{"kind":"Field","name":{"kind":"Name","value":"includeGitHistory"}},{"kind":"Field","name":{"kind":"Name","value":"additionalPaths"}}]}},{"kind":"Field","name":{"kind":"Name","value":"scriptConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"scriptPath"}},{"kind":"Field","name":{"kind":"Name","value":"scriptArgs"}},{"kind":"Field","name":{"kind":"Name","value":"workingDirectory"}},{"kind":"Field","name":{"kind":"Name","value":"environment"}},{"kind":"Field","name":{"kind":"Name","value":"outputPath"}}]}}]}}]} as unknown as DocumentNode; export const BackupStatsFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serv
erSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}}]} as unknown as DocumentNode; export const RCloneJobFragmentDoc = 
{"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RCloneJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJob"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"group"}},{"kind":"Field","name":{"kind":"Name","value":"configId"}},{"kind":"Field","name":{"kind":"Name","value":"finished"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"error"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveByte
s"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}}]} as unknown as DocumentNode; -export const BackupJobConfigWithCurrentJobFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}},{"kind":"Field","name":{"kind":"Name","value":"currentJob"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"RCloneJob"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","val
ue":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"F
ield","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RCloneJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJob"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"group"}},{"kind":"Field","name":{"kind":"Name","value":"configId"}},{"kind":"Field","name":{"kind":"Name","value":"finished"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"error"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}}]} as unknown as DocumentNode; +export const BackupJobConfigWithCurrentJobFragmentDoc = 
{"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}},{"kind":"Field","name":{"kind":"Name","value":"currentJob"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"RCloneJob"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"PreprocessConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"PreprocessConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"timeout"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupOnFailure"}},{"kind":"Field","name":{"kind":"Name","value":"zfsConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"poolName"}},{"kind":"Field","name":{"kind":"Name","value":"datasetName"}},{"kind":"Field","name":{"kind":"Name","value":"snapshotPrefix"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupSnapshots"}},{"kind":"Field","name":{"kind":"Name","value":"retainSnapshots"}}]}},{"kind":"Field","name":{"kind":"Name","value":"flashConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"flashPath"}},{"kind":"Field","name":{"kind":"Name","value":"includeGitHistory"}},{"kind":"Field","name":{"kind":"Name","value":"additionalPaths"}}]}},{"kind":"Field","name":{"kind":"Name","value":"scriptConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"scriptPath"}},{"kind":"Field","name":{"kind":"Name","value":"scriptArgs"}},{"kind":"Field","name":{"kind":"Name","value":"workingDirectory"}},{"kind":"Field","name":{"kind"
:"Name","value":"environment"}},{"kind":"Field","name":{"kind":"Name","value":"outputPath"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","na
me":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"backupMode"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"rcloneOptions"}},{"kind":"Field","name":{"kind":"Name","value":"preprocessConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"PreprocessConfig"}}]}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RCloneJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJob"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"group"}},{"kind":"Field","name":{"kind":"Name","value":"configId"}},{"kind":"Field","name":{"kind":"Name","value":"finished"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"error"}},{"kind":"
Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}}]} as unknown as DocumentNode; export const NotificationFragmentFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"NotificationFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"Notification"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"title"}},{"kind":"Field","name":{"kind":"Name","value":"subject"}},{"kind":"Field","name":{"kind":"Name","value":"description"}},{"kind":"Field","name":{"kind":"Name","value":"importance"}},{"kind":"Field","name":{"kind":"Name","value":"link"}},{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"timestamp"}},{"kind":"Field","name":{"kind":"Name","value":"formattedTimestamp"}}]}}]} as unknown as DocumentNode; export const NotificationCountFragmentFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"NotificationCountFragment"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"NotificationCounts"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"total"}},{"kind":"Field","name":{"kind":"Name","value":"info"}},{"kind":"Field","name":{"kind":"Name","value":"warning"}},{"kind":"Field","name":{"kind":"Name","value":"alert"}}]}}]} as unknown as DocumentNode; export const PartialCloudFragmentDoc = 
{"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"PartialCloud"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"Cloud"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"error"}},{"kind":"Field","name":{"kind":"Name","value":"apiKey"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"valid"}},{"kind":"Field","name":{"kind":"Name","value":"error"}}]}},{"kind":"Field","name":{"kind":"Name","value":"cloud"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"error"}}]}},{"kind":"Field","name":{"kind":"Name","value":"minigraphql"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"error"}}]}},{"kind":"Field","name":{"kind":"Name","value":"relay"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"error"}}]}}]}}]} as unknown as DocumentNode; @@ -2549,15 +2661,15 @@ export const DeleteApiKeyDocument = {"kind":"Document","definitions":[{"kind":"O export const ApiKeyMetaDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"ApiKeyMeta"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"apiKeyPossibleRoles"}},{"kind":"Field","name":{"kind":"Name","value":"apiKeyPossiblePermissions"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"resource"}},{"kind":"Field","name":{"kind":"Name","value":"actions"}}]}}]}}]} as unknown as DocumentNode; export const BackupJobsDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"BackupJobs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"jobs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"RCloneJob"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","
value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RCloneJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJob"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"group"}},{"kind":"Field","name":{"kind":"Name","value":"configId"}},{"kind":"Field","name":{"kind":"Name","value":"finished"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"error"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}}]} as unknown as DocumentNode; export const BackupJobDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"BackupJob"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backupJob"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"RCloneJob"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"
Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RCloneJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJob"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"group"}},{"kind":"Field","name":{"kind":"Name","value":"configId"}},{"kind":"Field","name":{"kind":"Name","value":"finished"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"error"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}}]} as unknown as DocumentNode; -export const BackupJobConfigDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"BackupJobConfig"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backupJobConfig"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","v
alue":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RCloneJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJob"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","
name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"group"}},{"kind":"Field","name":{"kind":"Name","value":"configId"}},{"kind":"Field","name":{"kind":"Name","value":"finished"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"error"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}},{"kind":"Field","name":{"kind":"Name","value":"currentJob"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"RCloneJob"}}]}}]}}]} as unknown as DocumentNode; -export const BackupJobConfigsDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"BackupJobConfigs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"configs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind"
:"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RCloneJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJob"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"group"}},{"kind":"Field","name":{"kind":"Name","value":"configId"}},{"kind":"Field","name":{"kind":"Name","value":"fi
nished"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"error"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}},{"kind":"Field","name":{"kind":"Name","value":"currentJob"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"RCloneJob"}}]}}]}}]} as unknown as DocumentNode; +export const BackupJobConfigDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"BackupJobConfig"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backupJobConfig"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"PreprocessConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"PreprocessConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":
"timeout"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupOnFailure"}},{"kind":"Field","name":{"kind":"Name","value":"zfsConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"poolName"}},{"kind":"Field","name":{"kind":"Name","value":"datasetName"}},{"kind":"Field","name":{"kind":"Name","value":"snapshotPrefix"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupSnapshots"}},{"kind":"Field","name":{"kind":"Name","value":"retainSnapshots"}}]}},{"kind":"Field","name":{"kind":"Name","value":"flashConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"flashPath"}},{"kind":"Field","name":{"kind":"Name","value":"includeGitHistory"}},{"kind":"Field","name":{"kind":"Name","value":"additionalPaths"}}]}},{"kind":"Field","name":{"kind":"Name","value":"scriptConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"scriptPath"}},{"kind":"Field","name":{"kind":"Name","value":"scriptArgs"}},{"kind":"Field","name":{"kind":"Name","value":"workingDirectory"}},{"kind":"Field","name":{"kind":"Name","value":"environment"}},{"kind":"Field","name":{"kind":"Name","value":"outputPath"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"backupMode"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"rcloneOpt
ions"}},{"kind":"Field","name":{"kind":"Name","value":"preprocessConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"PreprocessConfig"}}]}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"tr
ansfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RCloneJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJob"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"group"}},{"kind":"Field","name":{"kind":"Name","value":"configId"}},{"kind":"Field","name":{"kind":"Name","value":"finished"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"error"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}},{"kind":"Field","name":{"kind":"Name","value":"currentJob"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"RCloneJob"}}]}}]}}]} as unknown as DocumentNode; +export const BackupJobConfigsDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"BackupJobConfigs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"configs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"PreprocessConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"PreprocessConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"timeout"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupOnFailure"}},{"kind":"Field","name":{"kind":"Name","value":"zfsConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"poolName"}},{"kind":"Field","name":{"kind":"Name","value":"datasetName"}},{"kind":"Field","name":{"kind":"Name","value":"snapshotPrefix"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupSnapshots"}},{"kind":"Field","name":{"kind":"Name","value":"retainSnapshots"}}]}},{"kind":"Field","name":{"kind":"Name","value":"flashConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"flashPath"}},{"kind":"Field","name":{"kind":"Name","value":"includeGitHistory"}},{"kind":"Field","name":{"kind":"Name","value":"additionalPaths"}}]}},{"kind":"Field","name":{"kind":"Name","value":"scriptConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"scriptPath"}},{"kind":"Field","name":{"kind":"Name","value":"scriptArgs"}},{"kind":"Field","name":{"kind":"Name","value":"workingDirectory"}},{"
kind":"Field","name":{"kind":"Name","value":"environment"}},{"kind":"Field","name":{"kind":"Name","value":"outputPath"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"backupMode"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"rcloneOptions"}},{"kind":"Field","name":{"kind":"Name","value":"preprocessConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"PreprocessConfig"}}]}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"
}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"RCloneJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"RCloneJob"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"group"}},{"kind":"Field","name":{"kind":"Name","value":"configId"}},{"kind":"Field","name":{"kind":"Name","value":"finished"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name"
,"value":"error"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfigWithCurrentJob"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}},{"kind":"Field","name":{"kind":"Name","value":"currentJob"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"RCloneJob"}}]}}]}}]} as unknown as DocumentNode; export const BackupJobConfigsListDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"BackupJobConfigsList"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"configs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}}]}}]}}]}}]} as unknown as DocumentNode; export const BackupJobConfigFormDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"BackupJobConfigForm"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfigFormInput"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backupJobConfigForm"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"dataSchema"}},{"kind":"Field","name":{"kind":"Name","value":"uiSchema"}}]}}]}}]} as unknown as DocumentNode; -export const CreateBackupJobConfigDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"CreateBackupJobConfig"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"CreateBackupJobConfigInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"createBackupJobConfig"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"Select
ionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}}]} as unknown as DocumentNode; -export const UpdateBackupJobConfigDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"UpdateBackupJobConfig"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"UpdateBackupJobConfigInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"updateBackupJobConfig"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}},{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value
":"BackupJobConfig"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}}]} as unknown as DocumentNode; +export const CreateBackupJobConfigDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"CreateBackupJobConfig"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"CreateBackupJobConfigInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"createBackupJobConfig"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"PreprocessConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"PreprocessConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"timeout"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupOnFailure"}},{"kind":"Field","name":{"kind":"Name","value":"zfsConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"poolName"}},{"kind":"Field","name":{"kind":"Name","value":"datasetName"}},{"kind":"Field","name":{"kind":"Name","value":"snapshotPrefix"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupSnapshots"}},{"kind":"Field","name":{"kind":"Name","value":"retainSnapshots"}}]}},{"kind":"Field","name":{"kind":"Name","value":"flashConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"flashPath"}},{"kind":"Field","name":{"kind":"Name","value":"includeGitHistory"}},{"kind":"Field","name":{"kind":"Name"
,"value":"additionalPaths"}}]}},{"kind":"Field","name":{"kind":"Name","value":"scriptConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"scriptPath"}},{"kind":"Field","name":{"kind":"Name","value":"scriptArgs"}},{"kind":"Field","name":{"kind":"Name","value":"workingDirectory"}},{"kind":"Field","name":{"kind":"Name","value":"environment"}},{"kind":"Field","name":{"kind":"Name","value":"outputPath"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"backupMode"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"rcloneOptions"}},{"kind":"Field","name":{"kind":"Name","value":"preprocessConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"PreprocessConfig"}}]}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}}]} as unknown as DocumentNode; +export const UpdateBackupJobConfigDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"UpdateBackupJobConfig"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"UpdateBackupJobConfigInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"updateBackupJobConfig"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}},{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"PreprocessConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"PreprocessConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"timeout"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupOnFailure"}},{"kind":"Field","name":{"kind":"Name","value":"zfsConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"poolName"}},{"kind":"Field","name":{"kind":"Name","value":"datasetName"}},{"kind":"Field","name":{"kind":"Name","value":"snapshotPrefix"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupSnapshots"}},{"kind":"Field","name":{"kin
d":"Name","value":"retainSnapshots"}}]}},{"kind":"Field","name":{"kind":"Name","value":"flashConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"flashPath"}},{"kind":"Field","name":{"kind":"Name","value":"includeGitHistory"}},{"kind":"Field","name":{"kind":"Name","value":"additionalPaths"}}]}},{"kind":"Field","name":{"kind":"Name","value":"scriptConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"scriptPath"}},{"kind":"Field","name":{"kind":"Name","value":"scriptArgs"}},{"kind":"Field","name":{"kind":"Name","value":"workingDirectory"}},{"kind":"Field","name":{"kind":"Name","value":"environment"}},{"kind":"Field","name":{"kind":"Name","value":"outputPath"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"backupMode"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"rcloneOptions"}},{"kind":"Field","name":{"kind":"Name","value":"preprocessConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"PreprocessConfig"}}]}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name"
:{"kind":"Name","value":"currentJobId"}}]}}]} as unknown as DocumentNode; export const DeleteBackupJobConfigDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"DeleteBackupJobConfig"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"deleteBackupJobConfig"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}]}]}}]}}]} as unknown as DocumentNode; -export const ToggleBackupJobConfigDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"ToggleBackupJobConfig"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"toggleJobConfig"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selec
tions":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}}]} as unknown as DocumentNode; -export const TriggerBackupJobDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"TriggerBackupJob"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"triggerJob"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"jobId"}}]}}]}}]}}]} as unknown as DocumentNode; +export const ToggleBackupJobConfigDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"ToggleBackupJobConfig"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"toggleJobConfig"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupJobConfig"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"PreprocessConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"PreprocessConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"timeout"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupOnFailure"}},{"kind":"Field","name":{"kind":"Name","value":"zfsConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"poolName"}},{"kind":"Field","name":{"kind":"Name","value":"datasetName"}},{"kind":"Field","name":{"kind":"Name","value":"snapshotPrefix"}},{"kind":"Field","name":{"kind":"Name","value":"cleanupSnapshots"}},{"kind":"Field","name":{"kind":"Name","value":"retainSnapshots"}}]}},{"kind":"Field","name":{"kind":"Name","value":"flashConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"flashPath"}},{"kind":"Field","name":{"kind":"Name","value":"includeGitHistory"}},{"kind":"Field","name":{"kind":"Name","value":"additionalPaths"}}]}}
,{"kind":"Field","name":{"kind":"Name","value":"scriptConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"scriptPath"}},{"kind":"Field","name":{"kind":"Name","value":"scriptArgs"}},{"kind":"Field","name":{"kind":"Name","value":"workingDirectory"}},{"kind":"Field","name":{"kind":"Name","value":"environment"}},{"kind":"Field","name":{"kind":"Name","value":"outputPath"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupJobConfig"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BackupJobConfig"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"backupMode"}},{"kind":"Field","name":{"kind":"Name","value":"sourcePath"}},{"kind":"Field","name":{"kind":"Name","value":"remoteName"}},{"kind":"Field","name":{"kind":"Name","value":"destinationPath"}},{"kind":"Field","name":{"kind":"Name","value":"schedule"}},{"kind":"Field","name":{"kind":"Name","value":"enabled"}},{"kind":"Field","name":{"kind":"Name","value":"rcloneOptions"}},{"kind":"Field","name":{"kind":"Name","value":"preprocessConfig"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"PreprocessConfig"}}]}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}},{"kind":"Field","name":{"kind":"Name","value":"updatedAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunAt"}},{"kind":"Field","name":{"kind":"Name","value":"lastRunStatus"}},{"kind":"Field","name":{"kind":"Name","value":"currentJobId"}}]}}]} as unknown as DocumentNode; +export const TriggerBackupJobDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"TriggerBackupJob"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"triggerJob"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"jobId"}}]}}]}}]}}]} as unknown as DocumentNode; export const StopBackupJobDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"StopBackupJob"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"stopBackupJob"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"jobId"}}]}}]}}]}}]} as unknown as DocumentNode; export const InitiateBackupDocument = 
{"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"mutation","name":{"kind":"Name","value":"InitiateBackup"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"input"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"InitiateBackupInput"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backup"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"initiateBackup"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"input"},"value":{"kind":"Variable","name":{"kind":"Name","value":"input"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"jobId"}}]}}]}}]}}]} as unknown as DocumentNode; export const BackupJobProgressDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"subscription","name":{"kind":"Name","value":"BackupJobProgress"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"PrefixedID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"backupJobProgress"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"stats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BackupStats"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BackupStats"},"typeCondition":{"kind":"NamedType","
name":{"kind":"Name","value":"RCloneJobStats"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bytes"}},{"kind":"Field","name":{"kind":"Name","value":"speed"}},{"kind":"Field","name":{"kind":"Name","value":"eta"}},{"kind":"Field","name":{"kind":"Name","value":"elapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"percentage"}},{"kind":"Field","name":{"kind":"Name","value":"checks"}},{"kind":"Field","name":{"kind":"Name","value":"deletes"}},{"kind":"Field","name":{"kind":"Name","value":"errors"}},{"kind":"Field","name":{"kind":"Name","value":"fatalError"}},{"kind":"Field","name":{"kind":"Name","value":"lastError"}},{"kind":"Field","name":{"kind":"Name","value":"renames"}},{"kind":"Field","name":{"kind":"Name","value":"retryError"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopies"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideCopyBytes"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoves"}},{"kind":"Field","name":{"kind":"Name","value":"serverSideMoveBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalBytes"}},{"kind":"Field","name":{"kind":"Name","value":"totalChecks"}},{"kind":"Field","name":{"kind":"Name","value":"totalTransfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferTime"}},{"kind":"Field","name":{"kind":"Name","value":"transfers"}},{"kind":"Field","name":{"kind":"Name","value":"transferring"}},{"kind":"Field","name":{"kind":"Name","value":"checking"}},{"kind":"Field","name":{"kind":"Name","value":"formattedBytes"}},{"kind":"Field","name":{"kind":"Name","value":"formattedSpeed"}},{"kind":"Field","name":{"kind":"Name","value":"formattedElapsedTime"}},{"kind":"Field","name":{"kind":"Name","value":"formattedEta"}},{"kind":"Field","name":{"kind":"Name","value":"calculatedPercentage"}},{"kind":"Field","name":{"kind":"Name","value":"isActivelyRunning"}},{"kind":"Field","name":{"kind":"Name","value":"isCompleted"}}]}}]} as unknown as 
DocumentNode;