feat: progress on flash backup

Eli Bosley committed 2025-05-26 16:03:00 -04:00
parent 5b0862dd98
commit 5fcb8da50b
44 changed files with 6299 additions and 2030 deletions

.bivvy/m9X4-climb.md Normal file (1412 lines added; diff suppressed because it is too large)

.bivvy/m9X4-moves.json Normal file (180 lines added)

@@ -0,0 +1,180 @@
{
"climb": "m9X4",
"moves": [
{
"status": "complete",
"description": "Create preprocessing types and validation DTOs",
"details": "Create the core preprocessing types, enums, and validation DTOs as specified in the climb document. This includes PreprocessType enum, validation classes for ZFS, Flash, and Script configurations, and the main PreprocessConfigDto classes.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.ts"
]
},
{
"status": "complete",
"description": "Extend backup job data models with preprocessing fields",
"details": "Add preprocessing fields to the BackupJobConfig GraphQL model and input types. Include preprocessType, preprocessConfig, preprocessTimeout, and cleanupOnFailure fields with proper GraphQL decorators and validation.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/backup.model.ts"
]
},
{
"status": "complete",
"description": "Update BackupJobConfigData interface with preprocessing fields",
"details": "Extend the BackupJobConfigData interface to include the new preprocessing fields and update the mapToGraphQL method to handle the new fields.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts"
]
},
{
"status": "complete",
"description": "Create preprocessing validation service",
"details": "Implement the PreprocessConfigValidationService with business logic validation, async validation for ZFS pools and scripts, and transformation methods as detailed in the climb document.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing-validation.service.ts"
],
"rest": true
},
{
"status": "complete",
"description": "Create streaming job manager",
"details": "Implement the StreamingJobManager class to handle subprocess lifecycle management, process tracking, progress monitoring, and cleanup for streaming operations like ZFS and Flash backups.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.ts"
]
},
{
"status": "complete",
"description": "Create core preprocessing service",
"details": "Implement the main PreprocessingService with methods for executing different preprocessing types, handling streaming operations, and managing cleanup. Include the PreprocessResult interface and core execution logic.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.service.ts"
]
},
{
"status": "complete",
"description": "Extend RClone API service with streaming capabilities",
"details": "Add streaming backup methods to RCloneApiService including startStreamingBackup, streaming job tracking integration, and unified job status management for both daemon and streaming jobs.",
"files": [
"api/src/unraid-api/graph/resolvers/rclone/rclone-api.service.ts"
],
"rest": true
},
{
"status": "complete",
"description": "Create ZFS preprocessing implementation",
"details": "Implement ZFS-specific preprocessing including snapshot creation, streaming via `zfs send | rclone rcat`, snapshot cleanup, and error handling for ZFS operations.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/preprocessing/zfs-preprocessing.service.ts"
]
},
{
"status": "complete",
"description": "Create Flash backup preprocessing implementation",
"details": "Implement Flash backup preprocessing with local git repository setup, git operations, and streaming via `tar cf - /boot/.git | rclone rcat` as detailed in the climb document.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-preprocessing.service.ts"
]
},
{
"status": "complete",
"description": "Create custom script preprocessing implementation",
"details": "Implement custom script preprocessing with sandboxed execution, parameter passing, timeout handling, and file-based output (non-streaming for security).",
"files": [
"api/src/unraid-api/graph/resolvers/backup/preprocessing/script-preprocessing.service.ts"
]
},
{
"status": "complete",
"description": "Update backup config service with preprocessing integration",
"details": "Integrate preprocessing validation and execution into the backup config service. Update createBackupJobConfig, updateBackupJobConfig, and executeBackupJob methods to handle preprocessing.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts"
]
},
{
"status": "complete",
"description": "Update backup module with new services",
"details": "Add all new preprocessing services to the BackupModule providers array and ensure proper dependency injection setup.",
"files": [
"api/src/unraid-api/graph/resolvers/backup/backup.module.ts"
],
"rest": true
},
{
"status": "complete",
"description": "Update web GraphQL queries and fragments",
"details": "Add preprocessing fields to the BACKUP_JOB_CONFIG_FRAGMENT and update mutations to include the new preprocessing configuration fields.",
"files": [
"web/components/Backup/backup-jobs.query.ts"
]
},
{
"status": "todo",
"description": "Create preprocessing UI components",
"details": "Create Vue component for preprocessing configuration with dropdown for preprocessing type selection and dynamic form fields for each preprocessing type (ZFS, Flash, Script).",
"files": [
"web/components/Backup/PreprocessingConfig.vue"
]
},
{
"status": "todo",
"description": "Update backup job form component",
"details": "Integrate the PreprocessingConfig component into the backup job form and handle preprocessing configuration state management.",
"files": [
"web/components/Backup/BackupJobForm.vue"
]
},
{
"status": "todo",
"description": "Update backup job list component",
"details": "Add preprocessing status indicators to the backup job list and show preprocessing type and status information.",
"files": [
"web/components/Backup/BackupJobList.vue"
]
},
{
"status": "todo",
"description": "Create preprocessing status monitoring",
"details": "Create component to display preprocessing progress, streaming status, and error messages with real-time updates.",
"files": [
"web/components/Backup/PreprocessingStatus.vue"
],
"rest": true
},
{
"status": "skip",
"description": "Add preprocessing tests",
"details": "Create comprehensive unit tests for all preprocessing services including validation, execution, streaming operations, and error handling scenarios.",
"files": [
"api/src/__test__/preprocessing/preprocessing.service.spec.ts",
"api/src/__test__/preprocessing/zfs-preprocessing.service.spec.ts",
"api/src/__test__/preprocessing/flash-preprocessing.service.spec.ts",
"api/src/__test__/preprocessing/streaming-job-manager.spec.ts"
]
},
{
"status": "skip",
"description": "Add integration tests",
"details": "Create integration tests for end-to-end backup workflows with preprocessing, including ZFS snapshot streaming, Flash backup streaming, and error recovery scenarios.",
"files": [
"api/src/__test__/backup/backup-preprocessing-integration.spec.ts"
]
},
{
"status": "skip",
"description": "Update documentation",
"details": "Create comprehensive documentation for the preprocessing system including configuration examples, troubleshooting guide, and API reference.",
"files": [
"api/docs/backup-preprocessing.md"
]
},
{
"status": "skip",
"description": "Add preprocessing configuration examples",
"details": "Provide example configurations for each preprocessing type to help users understand the configuration options and best practices.",
"files": [
"api/docs/examples/preprocessing-configs.json"
]
}
]
}
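The ZFS and Flash moves above share one streaming pattern: a producer process (`zfs send` or `tar cf -`) is piped straight into `rclone rcat`, so no intermediate file is staged on local disk. A minimal sketch of that pipeline, assuming Node's child_process API; the helper name runStreamingPipe and the snapshot/remote values are illustrative, not the actual StreamingJobManager implementation:

import { spawn } from 'node:child_process';

// Pipe a producer command into `rclone rcat <remote:path>`, which uploads stdin to the remote.
function runStreamingPipe(cmd: string, args: string[], dest: string): Promise<void> {
    return new Promise((resolve, reject) => {
        const producer = spawn(cmd, args, { stdio: ['ignore', 'pipe', 'inherit'] });
        const uploader = spawn('rclone', ['rcat', dest], { stdio: ['pipe', 'inherit', 'inherit'] });
        producer.stdout!.pipe(uploader.stdin!);
        producer.on('error', reject);
        uploader.on('error', reject);
        uploader.on('close', (code) =>
            code === 0 ? resolve() : reject(new Error(`rclone exited with ${code}`))
        );
    });
}

// e.g. runStreamingPipe('zfs', ['send', 'tank/appdata@backup-2025-05-26'], 'gdrive:backups/appdata.zfs');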
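The custom-script move, by contrast, is deliberately file-based rather than streaming (for security): the script writes to its outputPath, and that file becomes the backup source afterwards. A rough sketch of that contract using execa, which this commit already uses elsewhere; the helper and its config shape are hypothetical:

import { execa } from 'execa';

// Run the user script under a timeout, then hand its output file to the
// regular file-based rclone backup as the source path.
async function runScriptPreprocess(cfg: {
    scriptPath: string;
    scriptArgs?: string[];
    workingDirectory?: string;
    environment?: Record<string, string>;
    outputPath: string;
    timeoutMs: number;
}): Promise<string> {
    await execa(cfg.scriptPath, cfg.scriptArgs ?? [], {
        cwd: cfg.workingDirectory ?? '/tmp',
        env: cfg.environment,
        timeout: cfg.timeoutMs, // execa kills the process when the timeout elapses
    });
    return cfg.outputPath; // used as the backup sourcePath
}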


@@ -1,5 +1,5 @@
[api]
version="4.8.0"
version="4.4.1"
extraOrigins="https://google.com,https://test.com"
[local]
sandbox="yes"


@@ -9,9 +9,9 @@
"enabled": false,
"rcloneOptions": {},
"createdAt": "2025-05-24T12:19:29.150Z",
"updatedAt": "2025-05-25T01:21:35.110Z",
"lastRunStatus": "Started with job ID: 51",
"currentJobId": 51,
"lastRunAt": "2025-05-25T01:21:35.111Z"
"updatedAt": "2025-05-26T16:14:13.977Z",
"lastRunStatus": "Started with job ID: 34",
"currentJobId": 34,
"lastRunAt": "2025-05-26T16:14:13.977Z"
}
]


@@ -1,3 +0,0 @@
-{
-"demo": "hello.unraider"
-}

File diff suppressed because it is too large


@@ -916,12 +916,91 @@ type BackupMutations {
input CreateBackupJobConfigInput {
name: String!
backupMode: BackupMode! = PREPROCESSING
sourcePath: String!
remoteName: String!
destinationPath: String!
schedule: String!
enabled: Boolean! = true
rcloneOptions: JSON
"""Preprocessing configuration for this backup job"""
preprocessConfig: PreprocessConfigInput
}
"""
The mode of backup to perform (Raw file backup or Preprocessing-based).
"""
enum BackupMode {
RAW
PREPROCESSING
}
input PreprocessConfigInput {
"""Type of preprocessing to perform"""
type: PreprocessType!
zfsConfig: ZfsPreprocessConfigInput
flashConfig: FlashPreprocessConfigInput
scriptConfig: ScriptPreprocessConfigInput
"""Timeout for preprocessing in seconds"""
timeout: Float! = 3600
"""Whether to cleanup on failure"""
cleanupOnFailure: Boolean! = true
}
"""Type of preprocessing to perform before backup"""
enum PreprocessType {
NONE
ZFS
FLASH
SCRIPT
}
input ZfsPreprocessConfigInput {
"""ZFS pool name"""
poolName: String!
"""Dataset name within the pool"""
datasetName: String!
"""Snapshot name prefix"""
snapshotPrefix: String
"""Whether to cleanup snapshots after backup"""
cleanupSnapshots: Boolean! = true
"""Number of snapshots to retain"""
retainSnapshots: Float
}
input FlashPreprocessConfigInput {
"""Flash drive mount path"""
flashPath: String! = "/boot"
"""Whether to include git history"""
includeGitHistory: Boolean! = true
"""Additional paths to include in backup"""
additionalPaths: [String!]
}
input ScriptPreprocessConfigInput {
"""Path to the script file"""
scriptPath: String!
"""Arguments to pass to the script"""
scriptArgs: [String!]
"""Working directory for script execution"""
workingDirectory: String
"""Environment variables for script execution"""
environment: JSON
"""Output file path where script should write data"""
outputPath: String!
}
input UpdateBackupJobConfigInput {
@@ -932,7 +1011,12 @@ input UpdateBackupJobConfigInput {
schedule: String
enabled: Boolean
rcloneOptions: JSON
"""Preprocessing configuration for this backup job"""
preprocessConfig: PreprocessConfigInput
lastRunStatus: String
currentJobId: String
lastRunAt: String
}
input InitiateBackupInput {
@@ -1067,6 +1151,37 @@ A date-time string at UTC, such as 2019-12-03T09:54:33Z, compliant with the date
"""
scalar DateTime
type ZfsPreprocessConfig {
poolName: String!
datasetName: String!
snapshotPrefix: String
cleanupSnapshots: Boolean!
retainSnapshots: Float
}
type FlashPreprocessConfig {
flashPath: String!
includeGitHistory: Boolean!
additionalPaths: [String!]
}
type ScriptPreprocessConfig {
scriptPath: String!
scriptArgs: [String!]
workingDirectory: String
environment: JSON
outputPath: String!
}
type PreprocessConfig {
type: PreprocessType!
zfsConfig: ZfsPreprocessConfig
flashConfig: FlashPreprocessConfig
scriptConfig: ScriptPreprocessConfig
timeout: Float!
cleanupOnFailure: Boolean!
}
type RCloneDrive {
"""Provider name"""
name: String!
@@ -1211,7 +1326,7 @@ type RCloneJob {
configId: PrefixedID
"""Current status of the job"""
-status: RCloneJobStatus
status: BackupJobStatus
"""Whether the job is finished"""
finished: Boolean
@@ -1232,11 +1347,11 @@ type RCloneJob {
hasRecentJob: Boolean
}
"""Status of an RClone job"""
enum RCloneJobStatus {
"""Status of a backup job"""
enum BackupJobStatus {
RUNNING
COMPLETED
-ERROR
FAILED
CANCELLED
}
@@ -1262,6 +1377,7 @@ type BackupJobConfig implements Node {
"""Human-readable name for this backup job"""
name: String!
backupMode: BackupMode!
"""Source path to backup"""
sourcePath: String!
@@ -1281,6 +1397,9 @@ type BackupJobConfig implements Node {
"""RClone options (e.g., --transfers, --checkers)"""
rcloneOptions: JSON
"""Preprocessing configuration for this backup job"""
preprocessConfig: PreprocessConfig
"""When this config was created"""
createdAt: DateTime!
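Putting the new input types together, a hypothetical createBackupJobConfig variables object for a ZFS-preprocessed job looks like this (all values are examples, not defaults from the code):

const input = {
    name: 'Appdata ZFS backup',
    backupMode: 'PREPROCESSING',
    sourcePath: '/mnt/tank/appdata',
    remoteName: 'gdrive',
    destinationPath: 'backups/appdata',
    schedule: '0 3 * * *',
    enabled: true,
    preprocessConfig: {
        type: 'ZFS',
        timeout: 3600,
        cleanupOnFailure: true,
        zfsConfig: {
            poolName: 'tank',
            datasetName: 'appdata',
            snapshotPrefix: 'backup',
            cleanupSnapshots: true,
            retainSnapshots: 5,
        },
    },
};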


@@ -87,6 +87,27 @@ describe('RCloneApiService', () => {
formatDuration: vi.fn(),
} as any;
// Mock RCloneStatusService
const mockStatusService = {
enhanceStatsWithFormattedFields: vi.fn(),
transformStatsToJob: vi.fn(),
calculateCombinedStats: vi.fn(),
parseActiveJobs: vi.fn(),
parseBackupStatus: vi.fn(),
parseJobWithStats: vi.fn(),
parseAllJobsWithStats: vi.fn(),
parseJobsWithStats: vi.fn(),
getBackupStatus: vi.fn(),
} as any;
// Mock StreamingJobManager
const mockStreamingJobManager = {
startJob: vi.fn(),
stopJob: vi.fn(),
getJobStatus: vi.fn(),
getAllJobs: vi.fn(),
} as any;
// Mock cache manager
mockCacheManager = {
get: vi.fn().mockResolvedValue(null),
@@ -94,7 +115,7 @@ describe('RCloneApiService', () => {
del: vi.fn().mockResolvedValue(undefined),
};
-service = new RCloneApiService(mockFormatService, mockCacheManager);
service = new RCloneApiService(mockStatusService, mockStreamingJobManager);
await service.onModuleInit();
});

api/src/unraid-api/graph/resolvers/backup/backup-config.service.ts

@@ -7,12 +7,19 @@ import { join } from 'path';
import { CronJob } from 'cron';
import { v4 as uuidv4 } from 'uuid';
import type {
PreprocessConfigInput,
PreprocessResult,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
import { getters } from '@app/store/index.js';
import {
BackupJobConfig,
BackupMode,
CreateBackupJobConfigInput,
UpdateBackupJobConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import { PreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.service.js';
import { PreprocessType } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';
const JOB_GROUP_PREFIX = 'backup-';
@@ -25,7 +32,9 @@ interface BackupJobConfigData {
destinationPath: string;
schedule: string;
enabled: boolean;
backupMode?: BackupMode;
rcloneOptions?: Record<string, unknown>;
preprocessConfig?: PreprocessConfigInput;
createdAt: string;
updatedAt: string;
lastRunAt?: string;
@@ -41,7 +50,8 @@ export class BackupConfigService implements OnModuleInit {
constructor(
private readonly rcloneService: RCloneService,
-private readonly schedulerRegistry: SchedulerRegistry
private readonly schedulerRegistry: SchedulerRegistry,
private readonly preprocessingService: PreprocessingService
) {
const paths = getters.paths();
this.configPath = join(paths.backupBase, 'backup-jobs.json');
@@ -76,17 +86,35 @@ export class BackupConfigService implements OnModuleInit {
id: string,
input: UpdateBackupJobConfigInput
): Promise<BackupJobConfig | null> {
this.logger.debug(
`[updateBackupJobConfig] Called with ID: ${id}, Input: ${JSON.stringify(input)}`
);
const existing = this.configs.get(id);
-if (!existing) return null;
if (!existing) {
this.logger.warn(`[updateBackupJobConfig] No existing config found for ID: ${id}`);
return null;
}
this.logger.debug(
`[updateBackupJobConfig] Existing config for ID ${id}: ${JSON.stringify(existing)}`
);
const updated: BackupJobConfigData = {
...existing,
...input,
updatedAt: new Date().toISOString(),
};
this.logger.debug(
`[updateBackupJobConfig] Updated object for ID ${id} (before set): ${JSON.stringify(updated)}`
);
this.configs.set(id, updated);
const immediatelyAfterSet = this.configs.get(id);
this.logger.debug(
`[updateBackupJobConfig] Config for ID ${id} (immediately after set): ${JSON.stringify(immediatelyAfterSet)}`
);
await this.saveConfigs();
this.logger.debug(`[updateBackupJobConfig] Configs saved for ID: ${id}`);
this.unscheduleJob(id);
if (updated.enabled) {
@@ -107,7 +135,15 @@ export class BackupConfigService implements OnModuleInit {
}
async getBackupJobConfig(id: string): Promise<BackupJobConfig | null> {
this.logger.debug(`[getBackupJobConfig] Called for ID: ${id}`);
const config = this.configs.get(id);
if (config) {
this.logger.debug(
`[getBackupJobConfig] Found config for ID ${id}: ${JSON.stringify(config)}`
);
} else {
this.logger.warn(`[getBackupJobConfig] No config found for ID: ${id}`);
}
return config ? this.mapToGraphQL(config) : null;
}
@@ -119,13 +155,66 @@ export class BackupConfigService implements OnModuleInit {
this.logger.log(`Executing backup job: ${config.name}`);
try {
-const result = (await this.rcloneService['rcloneApiService'].startBackup({
-srcPath: config.sourcePath,
-dstPath: `${config.remoteName}:${config.destinationPath}`,
-async: true,
-configId: config.id,
-options: config.rcloneOptions || {},
-})) as { jobId?: string; jobid?: string };
let sourcePath = config.sourcePath;
let preprocessResult: PreprocessResult | null = null;
if (config.preprocessConfig && config.preprocessConfig.type !== PreprocessType.NONE) {
this.logger.log(`Running preprocessing for job: ${config.name}`);
preprocessResult = await this.preprocessingService.executePreprocessing(
config.preprocessConfig,
{
jobId: config.id,
onProgress: (progress) => {
this.logger.debug(`Preprocessing progress for ${config.name}: ${progress}%`);
},
onOutput: (data) => {
this.logger.debug(`Preprocessing output for ${config.name}: ${data}`);
},
onError: (error) => {
this.logger.error(`Preprocessing error for ${config.name}: ${error}`);
},
}
);
if (!preprocessResult.success) {
throw new Error(`Preprocessing failed: ${preprocessResult.error}`);
}
if (preprocessResult.streamPath) {
sourcePath = preprocessResult.streamPath;
this.logger.log(`Using streaming source for backup: ${sourcePath}`);
} else if (preprocessResult.outputPath) {
sourcePath = preprocessResult.outputPath;
this.logger.log(`Using preprocessed output for backup: ${sourcePath}`);
}
}
const isStreamingBackup =
preprocessResult?.streamPath &&
(config.preprocessConfig?.type === PreprocessType.ZFS ||
config.preprocessConfig?.type === PreprocessType.FLASH);
let result;
if (isStreamingBackup && preprocessResult?.streamPath) {
const streamingOptions = this.buildStreamingOptions(
config.preprocessConfig!.type,
preprocessResult.streamPath,
config.remoteName,
config.destinationPath
);
result =
await this.rcloneService['rcloneApiService'].startStreamingBackup(streamingOptions);
} else {
result = (await this.rcloneService['rcloneApiService'].startBackup({
srcPath: sourcePath,
dstPath: `${config.remoteName}:${config.destinationPath}`,
async: true,
configId: config.id,
options: config.rcloneOptions || {},
})) as { jobId?: string; jobid?: string };
}
const jobId = result.jobId || result.jobid;
@@ -145,6 +234,47 @@ export class BackupConfigService implements OnModuleInit {
await this.saveConfigs();
this.logger.error(`Backup job ${config.name} failed:`, error);
if (config.preprocessConfig?.cleanupOnFailure) {
try {
await this.preprocessingService.cleanup(config.id);
} catch (cleanupError) {
this.logger.error(
`Failed to cleanup preprocessing for job ${config.name}:`,
cleanupError
);
}
}
}
}
private buildStreamingOptions(
preprocessType: PreprocessType,
streamPath: string,
remoteName: string,
destinationPath: string
) {
switch (preprocessType) {
case PreprocessType.ZFS:
return {
remoteName,
remotePath: destinationPath,
sourceCommand: 'zfs',
sourceArgs: ['send', streamPath],
preprocessType,
timeout: 3600000,
};
case PreprocessType.FLASH:
return {
remoteName,
remotePath: destinationPath,
sourceCommand: 'tar',
sourceArgs: ['cf', '-', streamPath],
preprocessType,
timeout: 3600000,
};
default:
throw new Error(`Unsupported streaming preprocessing type: ${preprocessType}`);
}
}
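For a ZFS job, the options built here ultimately drive `zfs send <snapshot> | rclone rcat <remote:path>`. An illustrative call (snapshot and remote names are made up):

buildStreamingOptions(PreprocessType.ZFS, 'tank/appdata@backup-2025-05-26', 'gdrive', 'backups/appdata');
// => {
//      remoteName: 'gdrive',
//      remotePath: 'backups/appdata',
//      sourceCommand: 'zfs',
//      sourceArgs: ['send', 'tank/appdata@backup-2025-05-26'],
//      preprocessType: PreprocessType.ZFS,
//      timeout: 3600000,
//    }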
@@ -229,15 +359,23 @@ export class BackupConfigService implements OnModuleInit {
}
private mapToGraphQL(config: BackupJobConfigData): BackupJobConfig {
const preprocessConfig = config.preprocessConfig
? {
...config.preprocessConfig,
}
: undefined;
return {
id: config.id,
name: config.name,
backupMode: config.backupMode || BackupMode.PREPROCESSING,
sourcePath: config.sourcePath,
remoteName: config.remoteName,
destinationPath: config.destinationPath,
schedule: config.schedule,
enabled: config.enabled,
rcloneOptions: config.rcloneOptions,
preprocessConfig: preprocessConfig,
createdAt: new Date(config.createdAt),
updatedAt: new Date(config.updatedAt),
lastRunAt: config.lastRunAt ? new Date(config.lastRunAt) : undefined,


@@ -173,20 +173,13 @@ export class BackupMutationsResolver {
config.id
);
-// Store the job ID in the config if successful
// Store the job ID and update timestamps in the config if successful
if (result.jobId) {
await this.backupConfigService.updateBackupJobConfig(id, {
lastRunStatus: `Started with job ID: ${result.jobId}`,
currentJobId: result.jobId,
lastRunAt: new Date().toISOString(),
});
-// Update the currentJobId in the config
-const configData = this.backupConfigService['configs'].get(id);
-if (configData) {
-configData.currentJobId = result.jobId;
-configData.lastRunAt = new Date().toISOString();
-this.backupConfigService['configs'].set(id, configData);
-await this.backupConfigService['saveConfigs']();
-}
}
return result;

api/src/unraid-api/graph/resolvers/backup/backup.model.ts

@@ -1,14 +1,39 @@
-import { Field, InputType, ObjectType } from '@nestjs/graphql';
import { Field, InputType, ObjectType, registerEnumType } from '@nestjs/graphql';
import { type Layout } from '@jsonforms/core';
-import { IsBoolean, IsNotEmpty, IsObject, IsOptional, IsString, Matches } from 'class-validator';
import { Type } from 'class-transformer';
import {
IsBoolean,
IsEnum,
IsNotEmpty,
IsObject,
IsOptional,
IsString,
Matches,
ValidateIf,
ValidateNested,
} from 'class-validator';
import { GraphQLJSON } from 'graphql-scalars';
import {
PreprocessConfig,
PreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
import { Node } from '@app/unraid-api/graph/resolvers/base.model.js';
import { RCloneJob } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { PrefixedID } from '@app/unraid-api/graph/scalars/graphql-type-prefixed-id.js';
import { DataSlice } from '@app/unraid-api/types/json-forms.js';
export enum BackupMode {
RAW = 'RAW',
PREPROCESSING = 'PREPROCESSING',
}
registerEnumType(BackupMode, {
name: 'BackupMode',
description: 'The mode of backup to perform (Raw file backup or Preprocessing-based).',
});
@ObjectType({
implements: () => Node,
})
@@ -73,6 +98,9 @@ export class BackupJobConfig extends Node {
@Field(() => String, { description: 'Human-readable name for this backup job' })
name!: string;
@Field(() => BackupMode)
backupMode!: BackupMode;
@Field(() => String, { description: 'Source path to backup' })
sourcePath!: string;
@@ -96,6 +124,12 @@ export class BackupJobConfig extends Node {
})
rcloneOptions?: Record<string, unknown>;
@Field(() => PreprocessConfig, {
description: 'Preprocessing configuration for this backup job',
nullable: true,
})
preprocessConfig?: PreprocessConfig;
@Field(() => Date, { description: 'When this config was created' })
createdAt!: Date;
@@ -122,9 +156,15 @@ export class CreateBackupJobConfigInput {
@IsNotEmpty()
name!: string;
@Field(() => BackupMode, { defaultValue: BackupMode.PREPROCESSING })
@IsEnum(BackupMode)
@IsNotEmpty()
backupMode?: BackupMode;
@Field(() => String)
@IsString()
@IsNotEmpty()
@ValidateIf((o) => o.backupMode === BackupMode.RAW)
@IsNotEmpty({ message: 'sourcePath should not be empty when backupMode is RAW' })
sourcePath!: string;
@Field(() => String)
@@ -156,6 +196,16 @@ export class CreateBackupJobConfigInput {
@IsOptional()
@IsObject()
rcloneOptions?: Record<string, unknown>;
@Field(() => PreprocessConfigInput, {
description: 'Preprocessing configuration for this backup job',
nullable: true,
})
@IsOptional()
@ValidateIf((o) => o.backupMode === BackupMode.PREPROCESSING)
@ValidateNested()
@Type(() => PreprocessConfigInput)
preprocessConfig?: PreprocessConfigInput;
}
@InputType()
@@ -206,10 +256,29 @@ export class UpdateBackupJobConfigInput {
@IsObject()
rcloneOptions?: Record<string, unknown>;
@Field(() => PreprocessConfigInput, {
description: 'Preprocessing configuration for this backup job',
nullable: true,
})
@IsOptional()
@ValidateNested()
@Type(() => PreprocessConfigInput)
preprocessConfig?: PreprocessConfigInput;
@Field(() => String, { nullable: true })
@IsOptional()
@IsString()
lastRunStatus?: string;
@Field(() => String, { nullable: true })
@IsOptional()
@IsString()
currentJobId?: string;
@Field(() => String, { nullable: true })
@IsOptional()
@IsString()
lastRunAt?: string;
}
@ObjectType()

api/src/unraid-api/graph/resolvers/backup/backup.module.ts

@@ -7,10 +7,11 @@ import {
BackupJobConfigResolver,
BackupResolver,
} from '@app/unraid-api/graph/resolvers/backup/backup.resolver.js';
import { PreprocessingModule } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.module.js';
import { RCloneModule } from '@app/unraid-api/graph/resolvers/rclone/rclone.module.js';
@Module({
-imports: [RCloneModule, ScheduleModule.forRoot()],
imports: [RCloneModule, ScheduleModule.forRoot(), PreprocessingModule],
providers: [BackupResolver, BackupJobConfigResolver, BackupMutationsResolver, BackupConfigService],
exports: [],
})

api/src/unraid-api/graph/resolvers/backup/backup.resolver.ts

@@ -10,14 +10,16 @@ import {
BackupJobConfigFormInput,
BackupStatus,
} from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import {
BACKUP_JOB_GROUP_PREFIX,
getBackupJobGroupId,
} from '@app/unraid-api/graph/resolvers/backup/backup.utils.js';
import { buildBackupJobConfigSchema } from '@app/unraid-api/graph/resolvers/backup/jsonforms/backup-jsonforms-config.js';
-import { RCloneJob, RCloneJobStatus } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { BackupJobStatus, RCloneJob } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.service.js';
import { PrefixedID } from '@app/unraid-api/graph/scalars/graphql-type-prefixed-id.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';
-const JOB_GROUP_PREFIX = 'backup-';
@Resolver(() => Backup)
export class BackupResolver {
private readonly logger = new Logger(BackupResolver.name);
@@ -87,12 +89,10 @@ export class BackupResolver {
async backupJobConfigForm(
@Args('input', { nullable: true }) input?: BackupJobConfigFormInput
): Promise<BackupJobConfigForm> {
-const remoteNames = await this.rcloneService.getConfiguredRemotes();
-const showAdvanced = input?.showAdvanced ?? false;
const remotes = await this.rcloneService.getRemoteDetails();
const { dataSchema, uiSchema } = buildBackupJobConfigSchema({
-remoteNames,
-showAdvanced,
remotes,
});
return {
@@ -115,7 +115,7 @@ export class BackupResolver {
this.logger.debug('backupJobs called - returning all jobs for frontend filtering');
const jobs = (await this.rcloneService['rcloneApiService'].getAllJobsWithStats()).filter(
-(job) => job.group?.startsWith(JOB_GROUP_PREFIX)
(job) => job.group?.startsWith(BACKUP_JOB_GROUP_PREFIX)
);
this.logger.debug(`Returning ${jobs.length} jobs total for frontend filtering`);
@@ -140,9 +140,21 @@ export class BackupJobConfigResolver {
})
async currentJob(@Parent() config: BackupJobConfig): Promise<RCloneJob | null> {
if (!config.currentJobId) {
// If there's no currentJobId, we assume no job is running for this config.
// Or, if currentJobId exists but is an empty string, also assume no job.
return null;
}
// Construct the group ID using the new utility function.
// const groupId = getBackupJobGroupId(config.id); // Old problematic line
this.logger.debug(
`Looking for current job for config ${config.id} using currentJobId: ${config.currentJobId}`
);
// Pass the specific rclone job ID (config.currentJobId) as the primary identifier.
// The second argument `config.id` is used by getEnhancedJobStatus to populate RCloneJob.configId
// and assist in constructing the full RCloneJob.id.
return this.rcloneService.getEnhancedJobStatus(config.currentJobId, config.id);
}
}

api/src/unraid-api/graph/resolvers/backup/backup.utils.ts Normal file

@@ -0,0 +1,32 @@
export const BACKUP_JOB_GROUP_PREFIX = 'backup-';
/**
* Generates the group ID for a backup job based on its configuration ID.
* This group ID is used by RClone to group related backup operations.
* @param configId The ID of the backup job configuration.
* @returns The RClone group ID string.
*/
export function getBackupJobGroupId(configId: string): string {
return `${BACKUP_JOB_GROUP_PREFIX}${configId}`;
}
/**
* Extracts the configuration ID from a backup job group ID.
* @param groupId The RClone group ID string (e.g., "backup-someConfigId").
* @returns The configuration ID if the group ID is valid and prefixed, otherwise undefined.
*/
export function getConfigIdFromGroupId(groupId: string): string | undefined {
if (groupId.startsWith(BACKUP_JOB_GROUP_PREFIX)) {
return groupId.substring(BACKUP_JOB_GROUP_PREFIX.length);
}
return undefined;
}
/**
* Checks if the given ID corresponds to a backup job group.
* @param id The ID string to check (can be a job ID or a group ID).
* @returns True if the ID represents a backup job group, false otherwise.
*/
export function isBackupJobGroup(id: string): boolean {
return id.startsWith(BACKUP_JOB_GROUP_PREFIX);
}
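A quick round-trip with these helpers (the config ID is illustrative):

const groupId = getBackupJobGroupId('abc123'); // 'backup-abc123'
isBackupJobGroup(groupId); // true
getConfigIdFromGroupId(groupId); // 'abc123'
getConfigIdFromGroupId('not-a-backup-group'); // undefined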

api/src/unraid-api/graph/resolvers/backup/jsonforms/backup-jsonforms-config.ts

@@ -1,11 +1,13 @@
-import type { LabelElement, Layout } from '@jsonforms/core';
-import { JsonSchema7 } from '@jsonforms/core';
import type { LabelElement, Layout, SchemaBasedCondition } from '@jsonforms/core';
import { JsonSchema7, RuleEffect } from '@jsonforms/core';
import type { RCloneRemote } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import type { DataSlice, SettingSlice, UIElement } from '@app/unraid-api/types/json-forms.js';
import { BackupMode } from '@app/unraid-api/graph/resolvers/backup/backup.model.js';
import { createLabeledControl } from '@app/unraid-api/graph/utils/form-utils.js';
import { mergeSettingSlices } from '@app/unraid-api/types/json-forms.js';
-function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[] }): SettingSlice {
function getBasicBackupConfigSlice({ remotes = [] }: { remotes?: RCloneRemote[] }): SettingSlice {
const basicConfigElements: UIElement[] = [
createLabeledControl({
scope: '#/properties/name',
@@ -18,12 +20,23 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[
}),
createLabeledControl({
-scope: '#/properties/sourcePath',
-label: 'Source Path',
-description: 'The local path to backup (e.g., /mnt/user/Documents)',
scope: '#/properties/backupMode',
label: 'Backup Mode',
description: 'Choose between preprocessing-based backup or raw file backup',
controlOptions: {
-placeholder: '/mnt/user/',
-format: 'string',
suggestions: [
{
value: BackupMode.PREPROCESSING,
label: 'Preprocessing Backup',
tooltip:
'Advanced backup using ZFS snapshots, flash drive backup, or custom scripts to prepare data before transfer',
},
{
value: BackupMode.RAW,
label: 'Raw File Backup',
tooltip: 'Simple folder-to-folder backup with direct file/directory paths',
},
],
},
}),
@@ -32,9 +45,9 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[
label: 'Remote Configuration',
description: 'Select the RClone remote configuration to use for this backup',
controlOptions: {
-suggestions: remoteNames.map((name) => ({
-value: name,
-label: name,
suggestions: remotes.map((remote) => ({
value: remote.name,
label: `${remote.name} (${remote.type})`,
})),
},
}),
@@ -68,6 +81,16 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[
label: 'Weekly (Sunday 2:00 AM)',
tooltip: 'Runs every Sunday at 2:00 AM',
},
{
value: '0 9 * * 1',
label: 'Mondays at 9:00 AM',
tooltip: 'Runs every Monday at 9:00 AM',
},
{
value: '0 0 1 * *',
label: 'Monthly (1st day at midnight)',
tooltip: 'Runs on the 1st day of every month at midnight',
},
{
value: '0 2 1 * *',
label: 'Monthly (1st at 2:00 AM)',
@@ -90,23 +113,6 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[
toggle: true,
},
}),
-{
-type: 'Label',
-text: 'Advanced Options',
-options: {
-description: 'Optional RClone-specific settings for this backup job.',
-},
-} as LabelElement,
-createLabeledControl({
-scope: '#/properties/showAdvanced',
-label: 'Show Advanced RClone Options',
-description: 'Display additional RClone configuration options',
-controlOptions: {
-toggle: true,
-},
-}),
];
const basicConfigProperties: Record<string, JsonSchema7> = {
@@ -117,17 +123,18 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[
minLength: 1,
maxLength: 100,
},
-sourcePath: {
backupMode: {
type: 'string',
-title: 'Source Path',
-description: 'Source path to backup',
-minLength: 1,
title: 'Backup Mode',
description: 'Type of backup to perform',
enum: [BackupMode.PREPROCESSING, BackupMode.RAW],
default: BackupMode.PREPROCESSING,
},
remoteName: {
type: 'string',
title: 'Remote Name',
description: 'Remote name from rclone config',
-enum: remoteNames.length > 0 ? remoteNames : ['No remotes configured'],
enum: remotes.length > 0 ? remotes.map((remote) => remote.name) : ['No remotes configured'],
},
destinationPath: {
type: 'string',
@@ -149,11 +156,13 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[
description: 'Whether this backup job is enabled',
default: true,
},
-showAdvanced: {
-type: 'boolean',
-title: 'Show Advanced Options',
-description: 'Whether to show advanced RClone options',
-default: false,
configStep: {
type: 'object',
properties: {
current: { type: 'integer', default: 0 },
total: { type: 'integer', default: 3 },
},
default: { current: 0, total: 3 },
},
};
@@ -169,14 +178,103 @@ function getBasicBackupConfigSlice({ remoteNames = [] }: { remoteNames?: string[
};
}
-function getAdvancedBackupConfigSlice({ showAdvanced }: { showAdvanced: boolean }): SettingSlice {
-if (!showAdvanced) {
-return {
-properties: {},
-elements: [],
-};
-}
function getRawBackupConfigSlice(): SettingSlice {
const rawConfigElements: UIElement[] = [
{
type: 'Label',
text: 'Raw Backup Configuration',
options: {
description: 'Configure direct file/folder backup with manual source paths.',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/rawConfig/properties/sourcePath',
label: 'Source Path',
description: 'The local path to backup (e.g., /mnt/user/Documents)',
controlOptions: {
placeholder: '/mnt/user/',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/rawConfig/properties/excludePatterns',
label: 'Exclude Patterns',
description: 'File patterns to exclude from backup (one per line, supports wildcards)',
controlOptions: {
multi: true,
placeholder: '*.tmp',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/rawConfig/properties/includePatterns',
label: 'Include Patterns',
description: 'File patterns to specifically include (one per line, supports wildcards)',
controlOptions: {
multi: true,
placeholder: '*.pdf',
format: 'string',
},
}),
];
const rawConfigProperties: Record<string, JsonSchema7> = {
rawConfig: {
type: 'object',
title: 'Raw Backup Configuration',
description: 'Configuration for direct file backup',
properties: {
sourcePath: {
type: 'string',
title: 'Source Path',
description: 'Source path to backup',
minLength: 1,
},
excludePatterns: {
type: 'array',
title: 'Exclude Patterns',
description: 'Patterns to exclude from backup',
items: {
type: 'string',
},
default: [],
},
includePatterns: {
type: 'array',
title: 'Include Patterns',
description: 'Patterns to include in backup',
items: {
type: 'string',
},
default: [],
},
},
required: ['sourcePath'],
},
};
const conditionalLayoutElement: UIElement = {
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/backupMode',
schema: { const: BackupMode.RAW },
} as SchemaBasedCondition,
},
elements: rawConfigElements,
};
return {
properties: rawConfigProperties,
elements: [conditionalLayoutElement],
};
}
function getAdvancedBackupConfigSlice(): SettingSlice {
const advancedConfigElements: UIElement[] = [
createLabeledControl({
scope: '#/properties/rcloneOptions/properties/transfers',
@@ -275,7 +373,7 @@ function getAdvancedBackupConfigSlice({ showAdvanced }: { showAdvanced: boolean
const verticalLayoutElement: UIElement = {
type: 'VerticalLayout',
elements: advancedConfigElements,
-options: { step: 1, showDividers: true },
options: { step: 2, showDividers: true },
};
return {
@@ -284,27 +382,411 @@ function getAdvancedBackupConfigSlice({ showAdvanced }: { showAdvanced: boolean
};
}
-export function buildBackupJobConfigSchema({
-remoteNames = [],
-showAdvanced = false,
-}: {
-remoteNames?: string[];
-showAdvanced?: boolean;
-}): {
function getPreprocessingConfigSlice(): SettingSlice {
const preprocessingElements: UIElement[] = [
{
type: 'Label',
text: 'Preprocessing Configuration',
options: {
description:
'Configure preprocessing steps to run before backup (e.g., ZFS snapshots, Flash backup, custom scripts).',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/type',
label: 'Preprocessing Type',
description: 'Select the type of preprocessing to perform before backup',
controlOptions: {
suggestions: [
{
value: 'ZFS',
label: 'ZFS Snapshot',
tooltip: 'Create ZFS snapshot and stream it',
},
{
value: 'FLASH',
label: 'Flash Backup',
tooltip: 'Backup Unraid flash drive with git history',
},
{
value: 'SCRIPT',
label: 'Custom Script',
tooltip: 'Run custom script before backup',
},
],
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/timeout',
label: 'Timeout (seconds)',
description: 'Maximum time to wait for preprocessing to complete (default: 300 seconds)',
controlOptions: {
placeholder: '300',
format: 'number',
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/cleanupOnFailure',
label: 'Cleanup on Failure',
description: 'Whether to clean up preprocessing artifacts if the backup fails',
controlOptions: {
toggle: true,
},
}),
// ZFS Configuration
{
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/preprocessConfig/properties/type',
schema: { const: 'ZFS' },
} as SchemaBasedCondition,
},
elements: [
{
type: 'Label',
text: 'ZFS Configuration',
options: {
description: 'Configure ZFS snapshot settings for preprocessing.',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/zfsConfig/properties/poolName',
label: 'ZFS Pool Name',
description: 'Name of the ZFS pool containing the dataset',
controlOptions: {
placeholder: 'tank',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/zfsConfig/properties/datasetName',
label: 'Dataset Name',
description: 'Name of the ZFS dataset to snapshot',
controlOptions: {
placeholder: 'data/documents',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/zfsConfig/properties/snapshotPrefix',
label: 'Snapshot Prefix',
description: 'Prefix for snapshot names (default: backup)',
controlOptions: {
placeholder: 'backup',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/zfsConfig/properties/cleanupSnapshots',
label: 'Cleanup Snapshots',
description: 'Whether to clean up snapshots after backup',
controlOptions: {
toggle: true,
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/zfsConfig/properties/retainSnapshots',
label: 'Retain Snapshots',
description: 'Number of snapshots to retain (0 = keep all)',
controlOptions: {
placeholder: '5',
format: 'number',
},
}),
],
},
// Flash Configuration
{
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/preprocessConfig/properties/type',
schema: { const: 'FLASH' },
} as SchemaBasedCondition,
},
elements: [
{
type: 'Label',
text: 'Flash Backup Configuration',
options: {
description: 'Configure Unraid flash drive backup settings.',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/flashConfig/properties/flashPath',
label: 'Flash Path',
description: 'Path to the Unraid flash drive (default: /boot)',
controlOptions: {
placeholder: '/boot',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/flashConfig/properties/includeGitHistory',
label: 'Include Git History',
description: 'Whether to include git history in the backup',
controlOptions: {
toggle: true,
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/flashConfig/properties/additionalPaths',
label: 'Additional Paths',
description: 'Additional paths to include in flash backup (one per line)',
controlOptions: {
multi: true,
placeholder: '/boot/config/plugins',
format: 'string',
},
}),
],
},
// Script Configuration
{
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/preprocessConfig/properties/type',
schema: { const: 'SCRIPT' },
} as SchemaBasedCondition,
},
elements: [
{
type: 'Label',
text: 'Custom Script Configuration',
options: {
description: 'Configure custom script execution settings.',
},
} as LabelElement,
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/scriptConfig/properties/scriptPath',
label: 'Script Path',
description: 'Full path to the script to execute',
controlOptions: {
placeholder: '/mnt/user/scripts/backup-prep.sh',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/scriptConfig/properties/scriptArgs',
label: 'Script Arguments',
description: 'Arguments to pass to the script (one per line)',
controlOptions: {
multi: true,
placeholder: '--verbose',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/scriptConfig/properties/workingDirectory',
label: 'Working Directory',
description: 'Working directory for script execution',
controlOptions: {
placeholder: '/tmp',
format: 'string',
},
}),
createLabeledControl({
scope: '#/properties/preprocessConfig/properties/scriptConfig/properties/outputPath',
label: 'Output Path',
description: 'Path where script should write output files for backup',
controlOptions: {
placeholder: '/tmp/backup-output',
format: 'string',
},
}),
],
},
];
const preprocessingProperties: Record<string, JsonSchema7> = {
preprocessConfig: {
type: 'object',
title: 'Preprocessing Configuration',
description: 'Configuration for preprocessing steps before backup',
properties: {
type: {
type: 'string',
title: 'Preprocessing Type',
description: 'Type of preprocessing to perform',
enum: ['ZFS', 'FLASH', 'SCRIPT'],
},
timeout: {
type: 'integer',
title: 'Timeout',
description: 'Timeout in seconds for preprocessing',
minimum: 30,
maximum: 3600,
default: 300,
},
cleanupOnFailure: {
type: 'boolean',
title: 'Cleanup on Failure',
description: 'Clean up preprocessing artifacts on failure',
default: true,
},
zfsConfig: {
type: 'object',
title: 'ZFS Configuration',
properties: {
poolName: {
type: 'string',
title: 'Pool Name',
description: 'ZFS pool name',
minLength: 1,
},
datasetName: {
type: 'string',
title: 'Dataset Name',
description: 'ZFS dataset name',
minLength: 1,
},
snapshotPrefix: {
type: 'string',
title: 'Snapshot Prefix',
description: 'Prefix for snapshot names',
default: 'backup',
},
cleanupSnapshots: {
type: 'boolean',
title: 'Cleanup Snapshots',
description: 'Clean up snapshots after backup',
default: true,
},
retainSnapshots: {
type: 'integer',
title: 'Retain Snapshots',
description: 'Number of snapshots to retain',
minimum: 0,
default: 5,
},
},
required: ['poolName', 'datasetName'],
},
flashConfig: {
type: 'object',
title: 'Flash Configuration',
properties: {
flashPath: {
type: 'string',
title: 'Flash Path',
description: 'Path to flash drive',
default: '/boot',
},
includeGitHistory: {
type: 'boolean',
title: 'Include Git History',
description: 'Include git history in backup',
default: true,
},
additionalPaths: {
type: 'array',
title: 'Additional Paths',
description: 'Additional paths to include',
items: {
type: 'string',
},
default: [],
},
},
},
scriptConfig: {
type: 'object',
title: 'Script Configuration',
properties: {
scriptPath: {
type: 'string',
title: 'Script Path',
description: 'Path to script file',
minLength: 1,
},
scriptArgs: {
type: 'array',
title: 'Script Arguments',
description: 'Arguments for script',
items: {
type: 'string',
},
default: [],
},
workingDirectory: {
type: 'string',
title: 'Working Directory',
description: 'Working directory for script',
default: '/tmp',
},
outputPath: {
type: 'string',
title: 'Output Path',
description: 'Path for script output',
minLength: 1,
},
},
required: ['scriptPath', 'outputPath'],
},
},
required: ['type'],
},
};
const conditionalLayoutElement: UIElement = {
type: 'VerticalLayout',
rule: {
effect: RuleEffect.SHOW,
condition: {
scope: '#/properties/backupMode',
schema: { const: BackupMode.PREPROCESSING },
} as SchemaBasedCondition,
},
elements: preprocessingElements,
};
return {
properties: preprocessingProperties,
elements: [conditionalLayoutElement],
};
}
export function buildBackupJobConfigSchema({ remotes = [] }: { remotes?: RCloneRemote[] }): {
dataSchema: { properties: DataSlice; type: 'object' };
uiSchema: Layout;
} {
const slicesToMerge: SettingSlice[] = [];
-const basicSlice = getBasicBackupConfigSlice({ remoteNames });
const basicSlice = getBasicBackupConfigSlice({ remotes });
slicesToMerge.push(basicSlice);
-const advancedSlice = getAdvancedBackupConfigSlice({ showAdvanced });
-if (
-showAdvanced &&
-(advancedSlice.elements.length > 0 || Object.keys(advancedSlice.properties).length > 0)
-) {
-slicesToMerge.push(advancedSlice);
const preprocessingSlice = getPreprocessingConfigSlice();
slicesToMerge.push(preprocessingSlice);
const rawBackupSlice = getRawBackupConfigSlice();
slicesToMerge.push(rawBackupSlice);
const advancedSlice = getAdvancedBackupConfigSlice();
if (Object.keys(advancedSlice.properties).length > 0) {
slicesToMerge.push({ properties: advancedSlice.properties, elements: [] });
}
const mergedSlices = mergeSettingSlices(slicesToMerge);
@@ -314,18 +796,33 @@ export function buildBackupJobConfigSchema({
properties: mergedSlices.properties,
};
-const steps = [{ label: 'Backup Configuration', description: 'Basic backup job settings' }];
const steps = [
{ label: 'Backup Configuration', description: 'Basic backup job settings and mode selection' },
{
label: 'Source Configuration',
description: 'Configure backup source (preprocessing or raw files)',
},
{ label: 'Advanced Options', description: 'RClone-specific settings' },
];
-if (showAdvanced) {
-steps.push({ label: 'Advanced Options', description: 'RClone-specific settings' });
-}
const step0Elements = basicSlice.elements;
const step1WrapperLayout: UIElement = {
type: 'VerticalLayout',
elements: [...(preprocessingSlice.elements || []), ...(rawBackupSlice.elements || [])],
options: { step: 1 },
};
const step2Elements = advancedSlice.elements;
const steppedLayoutElement: UIElement = {
type: 'SteppedLayout',
options: {
steps: steps,
},
-elements: mergedSlices.elements,
elements: [...(step0Elements || []), step1WrapperLayout, ...(step2Elements || [])].filter(
(el) => el
) as UIElement[],
};
const titleLabel: UIElement = {

api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-preprocessing.service.ts Normal file

@@ -0,0 +1,261 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, mkdir, writeFile } from 'fs/promises';
import { dirname, join } from 'path';
import { execa } from 'execa';
import { FlashValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/flash-validation.service.js';
import {
FlashPreprocessConfigInput,
PreprocessResult,
PreprocessType,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
import {
RCloneApiService,
StreamingBackupOptions,
} from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
@Injectable()
export class FlashPreprocessingService {
private readonly logger = new Logger(FlashPreprocessingService.name);
constructor(
private readonly rcloneApiService: RCloneApiService,
private readonly flashValidationService: FlashValidationService
) {}
async executeFlashPreprocessing(
config: FlashPreprocessConfigInput,
remoteName: string,
remotePath: string,
timeout: number = 3600000
): Promise<PreprocessResult> {
// Validate configuration first
const validationResult = await this.flashValidationService.validateFlashConfig(config);
if (!validationResult.isValid) {
return {
success: false,
error: `Flash configuration validation failed: ${validationResult.errors.join(', ')}`,
metadata: {
validationErrors: validationResult.errors,
validationWarnings: validationResult.warnings,
},
};
}
// Log any warnings
if (validationResult.warnings.length > 0) {
this.logger.warn(`Flash preprocessing warnings: ${validationResult.warnings.join(', ')}`);
}
const tempGitPath = join(config.flashPath, '.git-backup-temp');
let gitRepoInitialized = false;
try {
// Initialize git repository if needed and includeGitHistory is enabled
if (config.includeGitHistory) {
gitRepoInitialized = await this.initializeGitRepository(config.flashPath, tempGitPath);
if (gitRepoInitialized) {
this.logger.log(`Initialized git repository for Flash backup at: ${tempGitPath}`);
}
}
// Stream the Flash backup directly to rclone
const streamingResult = await this.streamFlashBackup(
config,
remoteName,
remotePath,
tempGitPath,
gitRepoInitialized,
timeout
);
// Cleanup temporary git repository
if (gitRepoInitialized) {
await this.cleanupTempGitRepo(tempGitPath);
this.logger.log(`Cleaned up temporary git repository: ${tempGitPath}`);
}
return {
success: true,
outputPath: `${remoteName}:${remotePath}`,
metadata: {
flashPath: config.flashPath,
gitHistoryIncluded: config.includeGitHistory && gitRepoInitialized,
additionalPaths: config.additionalPaths,
bytesTransferred: streamingResult.bytesTransferred,
duration: streamingResult.duration,
validationWarnings: validationResult.warnings,
flashInfo: validationResult.metadata,
jobId: streamingResult.jobId,
},
};
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(
`Flash preprocessing failed: ${errorMessage}`,
error instanceof Error ? error.stack : undefined
);
// Cleanup temporary git repository on failure
if (gitRepoInitialized) {
try {
await this.cleanupTempGitRepo(tempGitPath);
this.logger.log(`Cleaned up temporary git repository after failure: ${tempGitPath}`);
} catch (cleanupError: unknown) {
const cleanupErrorMessage =
cleanupError instanceof Error ? cleanupError.message : String(cleanupError);
this.logger.error(
`Failed to cleanup temporary git repository: ${cleanupErrorMessage}`
);
}
}
return {
success: false,
error: errorMessage,
cleanupRequired: gitRepoInitialized,
metadata: {
flashPath: config.flashPath,
gitRepoInitialized,
cleanupAttempted: gitRepoInitialized,
},
};
}
}
private async initializeGitRepository(flashPath: string, tempGitPath: string): Promise<boolean> {
try {
// Check if git repository already exists
const existingGitPath = join(flashPath, '.git');
const hasExistingRepo = await this.flashValidationService.validateGitRepository(flashPath);
if (hasExistingRepo) {
// Copy existing .git directory to temp location
await execa('cp', ['-r', existingGitPath, tempGitPath]);
this.logger.log('Copied existing git repository to temporary location');
return true;
}
// Initialize new git repository in temp location
await mkdir(tempGitPath, { recursive: true });
await execa('git', ['init'], { cwd: tempGitPath });
// Create a gitignore file to exclude sensitive files
const gitignorePath = join(tempGitPath, '.gitignore');
const gitignoreContent = [
'# Exclude sensitive files',
'*.key',
'*.pem',
'*.p12',
'*.pfx',
'config/passwd',
'config/shadow',
'config/ssh/',
'config/ssl/',
'config/wireguard/',
'config/network.cfg',
'config/ident.cfg',
].join('\n');
await writeFile(gitignorePath, gitignoreContent);
// Add all files to the repository
await execa('git', ['add', '.'], { cwd: flashPath });
await execa(
'git',
[
'-c',
'user.name=Unraid Backup',
'-c',
'user.email=backup@unraid.net',
'commit',
'-m',
'Flash backup snapshot',
],
{ cwd: flashPath }
);
// Move .git directory to temp location
await execa('mv', [join(flashPath, '.git'), tempGitPath]);
this.logger.log('Initialized new git repository for Flash backup');
return true;
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.warn(`Failed to initialize git repository: ${errorMessage}`);
return false;
}
}
private async streamFlashBackup(
config: FlashPreprocessConfigInput,
remoteName: string,
remotePath: string,
tempGitPath: string,
includeGit: boolean,
timeout: number
): Promise<{ bytesTransferred?: number; duration: number; jobId?: string }> {
// Build tar command arguments
const tarArgs = ['cf', '-'];
// Add flash directory contents (exclude .git-backup-temp to avoid conflicts)
tarArgs.push('--exclude=.git-backup-temp', '-C', config.flashPath, '.');
// Add git repository if available
if (includeGit) {
tarArgs.push('-C', dirname(tempGitPath), '.git-backup-temp');
}
// Add additional paths if specified
if (config.additionalPaths && config.additionalPaths.length > 0) {
for (const additionalPath of config.additionalPaths) {
try {
await access(additionalPath);
tarArgs.push('-C', dirname(additionalPath), '.');
} catch (error: unknown) {
this.logger.warn(`Skipping inaccessible additional path: ${additionalPath}`);
}
}
}
const streamingOptions: StreamingBackupOptions = {
remoteName,
remotePath,
sourceCommand: 'tar',
sourceArgs: tarArgs,
preprocessType: PreprocessType.FLASH,
timeout,
onProgress: (progress) => {
this.logger.debug(`Flash backup streaming progress: ${progress}%`);
},
onOutput: (data) => {
this.logger.debug(`Flash backup output: ${data.slice(0, 100)}...`);
},
onError: (error) => {
this.logger.error(`Flash backup error: ${error}`);
},
};
const result = await this.rcloneApiService.startStreamingBackup(streamingOptions);
if (!result.success) {
throw new Error(result.error || 'Flash backup streaming failed');
}
return {
bytesTransferred: result.bytesTransferred,
duration: result.duration,
jobId: result.jobId,
};
}
private async cleanupTempGitRepo(tempGitPath: string): Promise<void> {
try {
await execa('rm', ['-rf', tempGitPath]);
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
throw new Error(`Temporary git repository cleanup failed: ${errorMessage}`);
}
}
}
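For reference: with flashPath '/boot' and git history enabled, the tar arguments assembled in streamFlashBackup come out to roughly

tar cf - --exclude=.git-backup-temp -C /boot . -C /boot .git-backup-temp

which startStreamingBackup then feeds to `rclone rcat` on the destination remote.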

api/src/unraid-api/graph/resolvers/backup/preprocessing/flash-validation.service.ts Normal file

@@ -0,0 +1,260 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, stat } from 'fs/promises';
import { join } from 'path';
import { execa } from 'execa';
import { FlashPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
export interface FlashValidationResult {
isValid: boolean;
errors: string[];
warnings: string[];
metadata: {
flashPathExists?: boolean;
flashPathMounted?: boolean;
gitRepoExists?: boolean;
gitRepoSize?: number | null;
additionalPathsValid?: boolean[];
totalSize?: number | null;
availableSpace?: number | null;
};
}
@Injectable()
export class FlashValidationService {
private readonly logger = new Logger(FlashValidationService.name);
async validateFlashConfig(config: FlashPreprocessConfigInput): Promise<FlashValidationResult> {
const result: FlashValidationResult = {
isValid: true,
errors: [],
warnings: [],
metadata: {},
};
try {
// Validate flash path exists and is accessible
const flashPathValid = await this.validateFlashPath(config.flashPath);
result.metadata.flashPathExists = flashPathValid;
if (!flashPathValid) {
result.errors.push(
`Flash path '${config.flashPath}' does not exist or is not accessible`
);
result.isValid = false;
return result;
}
// Check if flash path is mounted
const isMounted = await this.isFlashMounted(config.flashPath);
result.metadata.flashPathMounted = isMounted;
if (!isMounted) {
result.warnings.push(`Flash path '${config.flashPath}' may not be properly mounted`);
}
// Validate git repository if includeGitHistory is enabled
if (config.includeGitHistory) {
const gitRepoExists = await this.validateGitRepository(config.flashPath);
result.metadata.gitRepoExists = gitRepoExists;
if (!gitRepoExists) {
result.warnings.push(
`Git repository not found in '${config.flashPath}'. Git history will be skipped.`
);
} else {
const gitRepoSize = await this.getGitRepositorySize(config.flashPath);
result.metadata.gitRepoSize = gitRepoSize;
if (gitRepoSize && gitRepoSize > 100 * 1024 * 1024) {
// 100MB
result.warnings.push(
`Git repository is large (${Math.round(gitRepoSize / 1024 / 1024)}MB). Backup may take longer.`
);
}
}
}
// Validate additional paths
if (config.additionalPaths && config.additionalPaths.length > 0) {
const pathValidations = await Promise.all(
config.additionalPaths.map((path) => this.validateAdditionalPath(path))
);
result.metadata.additionalPathsValid = pathValidations;
const invalidPaths = config.additionalPaths.filter(
(_, index) => !pathValidations[index]
);
if (invalidPaths.length > 0) {
result.warnings.push(
`Some additional paths are not accessible: ${invalidPaths.join(', ')}`
);
}
}
// Calculate total backup size
const totalSize = await this.calculateTotalBackupSize(config);
result.metadata.totalSize = totalSize;
// Check available space
const availableSpace = await this.getAvailableSpace(config.flashPath);
result.metadata.availableSpace = availableSpace;
if (totalSize && availableSpace && totalSize > availableSpace * 0.8) {
result.warnings.push(
'Backup size may be close to available space. Monitor disk usage during backup.'
);
}
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
result.errors.push(`Validation failed: ${errorMessage}`);
result.isValid = false;
}
return result;
}
async validateFlashPath(flashPath: string): Promise<boolean> {
try {
await access(flashPath);
const stats = await stat(flashPath);
return stats.isDirectory();
} catch {
return false;
}
}
async isFlashMounted(flashPath: string): Promise<boolean> {
try {
// Check if the path is a mount point by comparing device IDs
const pathStat = await stat(flashPath);
const parentStat = await stat(join(flashPath, '..'));
return pathStat.dev !== parentStat.dev;
} catch {
return false;
}
}
async validateGitRepository(flashPath: string): Promise<boolean> {
const gitPath = join(flashPath, '.git');
try {
await access(gitPath);
const stats = await stat(gitPath);
return stats.isDirectory();
} catch {
return false;
}
}
async getGitRepositorySize(flashPath: string): Promise<number | null> {
const gitPath = join(flashPath, '.git');
try {
const { stdout } = await execa('du', ['-sb', gitPath]);
const size = parseInt(stdout.split('\t')[0], 10);
return isNaN(size) ? null : size;
} catch {
return null;
}
}
async validateAdditionalPath(path: string): Promise<boolean> {
try {
await access(path);
return true;
} catch {
return false;
}
}
async calculateTotalBackupSize(config: FlashPreprocessConfigInput): Promise<number | null> {
try {
let totalSize = 0;
// Get flash directory size
const { stdout: flashSize } = await execa('du', ['-sb', config.flashPath]);
totalSize += parseInt(flashSize.split('\t')[0], 10) || 0;
// Add additional paths if specified
if (config.additionalPaths) {
for (const path of config.additionalPaths) {
try {
const { stdout: pathSize } = await execa('du', ['-sb', path]);
totalSize += parseInt(pathSize.split('\t')[0], 10) || 0;
} catch (error: unknown) {
this.logger.warn(
`Failed to get size for additional path ${path}: ${error instanceof Error ? error.message : String(error)}`
);
}
}
}
return totalSize;
} catch {
return null;
}
}
async getAvailableSpace(path: string): Promise<number | null> {
try {
const { stdout } = await execa('df', ['-B1', path]);
const lines = stdout.split('\n');
if (lines.length > 1) {
const fields = lines[1].split(/\s+/);
if (fields.length >= 4) {
const available = parseInt(fields[3], 10);
return isNaN(available) ? null : available;
}
}
return null;
} catch {
return null;
}
}
async checkGitStatus(flashPath: string): Promise<{
hasUncommittedChanges: boolean;
currentBranch: string | null;
lastCommitHash: string | null;
}> {
const result = {
hasUncommittedChanges: false,
currentBranch: null as string | null,
lastCommitHash: null as string | null,
};
try {
// Check for uncommitted changes
const { stdout: statusOutput } = await execa('git', ['status', '--porcelain'], {
cwd: flashPath,
});
result.hasUncommittedChanges = statusOutput.trim().length > 0;
// Get current branch
try {
const { stdout: branchOutput } = await execa(
'git',
['rev-parse', '--abbrev-ref', 'HEAD'],
{ cwd: flashPath }
);
result.currentBranch = branchOutput.trim();
} catch {
// Ignore branch detection errors
}
// Get last commit hash
try {
const { stdout: commitOutput } = await execa('git', ['rev-parse', 'HEAD'], {
cwd: flashPath,
});
result.lastCommitHash = commitOutput.trim();
} catch {
// Ignore commit hash detection errors
}
} catch {
// Git commands failed, repository might not be initialized
}
return result;
}
}

View File

@@ -0,0 +1,297 @@
import { BadRequestException, Injectable, Logger } from '@nestjs/common';
import { existsSync } from 'fs';
import { access, constants } from 'fs/promises';
import { plainToInstance } from 'class-transformer';
import { validate, ValidationError } from 'class-validator';
import { execa } from 'execa';
import {
FlashPreprocessConfigInput,
PreprocessConfigInput,
PreprocessType,
ScriptPreprocessConfigInput,
ZfsPreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
export interface ValidatedPreprocessConfig {
type: PreprocessType;
config?: {
zfs?: ZfsPreprocessConfigInput;
flash?: FlashPreprocessConfigInput;
script?: ScriptPreprocessConfigInput;
};
timeout: number;
cleanupOnFailure: boolean;
}
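/**
 * Three-stage validation pipeline: class-validator DTO checks, then
 * synchronous business rules, then async existence checks against the
 * live system (zpool/zfs, script files, flash path).
 */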
@Injectable()
export class PreprocessConfigValidationService {
private readonly logger = new Logger(PreprocessConfigValidationService.name);
async validateAndTransform(input: PreprocessConfigInput): Promise<ValidatedPreprocessConfig> {
const dto = plainToInstance(PreprocessConfigInput, input);
const validationErrors = await validate(dto);
if (validationErrors.length > 0) {
const errorMessages = this.formatValidationErrors(validationErrors);
throw new BadRequestException(`Validation failed: ${errorMessages}`);
}
const businessErrors = this.validateBusinessRules(dto);
if (businessErrors.length > 0) {
throw new BadRequestException(`Configuration errors: ${businessErrors.join('; ')}`);
}
await this.validateAsyncRules(dto);
return this.transformToValidatedConfig(dto);
}
private formatValidationErrors(errors: ValidationError[]): string {
return errors
.map((error) => {
const constraints = error.constraints || {};
return Object.values(constraints).join(', ');
})
.join('; ');
}
private validateBusinessRules(dto: PreprocessConfigInput): string[] {
const errors: string[] = [];
if (dto.type !== PreprocessType.NONE) {
if (!dto.zfsConfig && !dto.flashConfig && !dto.scriptConfig) {
errors.push('Preprocessing configuration is required when type is not "none"');
}
}
if (dto.type === PreprocessType.ZFS && !dto.zfsConfig) {
errors.push('ZFS configuration is required when type is "zfs"');
}
if (dto.type === PreprocessType.FLASH && !dto.flashConfig) {
errors.push('Flash configuration is required when type is "flash"');
}
if (dto.type === PreprocessType.SCRIPT && !dto.scriptConfig) {
errors.push('Script configuration is required when type is "script"');
}
if (dto.type === PreprocessType.ZFS && dto.zfsConfig) {
errors.push(...this.validateZfsConfig(dto.zfsConfig));
}
if (dto.type === PreprocessType.FLASH && dto.flashConfig) {
errors.push(...this.validateFlashConfig(dto.flashConfig));
}
if (dto.type === PreprocessType.SCRIPT && dto.scriptConfig) {
errors.push(...this.validateScriptConfig(dto.scriptConfig));
}
return errors;
}
private validateZfsConfig(config: ZfsPreprocessConfigInput): string[] {
const errors: string[] = [];
if (config.poolName.includes('..') || config.poolName.startsWith('/')) {
errors.push('Invalid ZFS pool name format');
}
if (config.datasetName.includes('..') || config.datasetName.includes('//')) {
errors.push('Invalid ZFS dataset name format');
}
if (config.retainSnapshots && config.retainSnapshots < 1) {
errors.push('Retain snapshots must be at least 1');
}
return errors;
}
private validateFlashConfig(config: FlashPreprocessConfigInput): string[] {
const errors: string[] = [];
if (!config.flashPath.startsWith('/')) {
errors.push('Flash path must be an absolute path');
}
if (config.additionalPaths) {
for (const path of config.additionalPaths) {
if (!path.startsWith('/')) {
errors.push(`Additional path "${path}" must be an absolute path`);
}
}
}
return errors;
}
private validateScriptConfig(config: ScriptPreprocessConfigInput): string[] {
const errors: string[] = [];
if (!config.scriptPath.startsWith('/')) {
errors.push('Script path must be an absolute path');
}
if (!config.scriptPath.match(/\.(sh|py|pl|js)$/)) {
errors.push('Script must have a valid extension (.sh, .py, .pl, .js)');
}
if (!config.outputPath.startsWith('/')) {
errors.push('Output path must be an absolute path');
}
if (
config.scriptArgs?.some((arg) => arg.includes(';') || arg.includes('|') || arg.includes('&'))
) {
errors.push('Script arguments cannot contain shell operators (;, |, &)');
}
if (config.workingDirectory && !config.workingDirectory.startsWith('/')) {
errors.push('Working directory must be an absolute path');
}
return errors;
}
private async validateAsyncRules(dto: PreprocessConfigInput): Promise<void> {
if (dto.type === PreprocessType.ZFS && dto.zfsConfig) {
const poolExists = await this.validateZfsPool(dto.zfsConfig.poolName);
if (!poolExists) {
throw new BadRequestException(`ZFS pool '${dto.zfsConfig.poolName}' does not exist`);
}
const datasetExists = await this.validateZfsDataset(
dto.zfsConfig.poolName,
dto.zfsConfig.datasetName
);
if (!datasetExists) {
throw new BadRequestException(
`ZFS dataset '${dto.zfsConfig.poolName}/${dto.zfsConfig.datasetName}' does not exist`
);
}
}
if (dto.type === PreprocessType.SCRIPT && dto.scriptConfig) {
const scriptExists = await this.validateScriptExists(dto.scriptConfig.scriptPath);
if (!scriptExists) {
throw new BadRequestException(
`Script '${dto.scriptConfig.scriptPath}' does not exist or is not executable`
);
}
}
if (dto.type === PreprocessType.FLASH && dto.flashConfig) {
const flashPathExists = await this.validateFlashPath(dto.flashConfig.flashPath);
if (!flashPathExists) {
throw new BadRequestException(
`Flash path '${dto.flashConfig.flashPath}' does not exist`
);
}
}
}
async validateZfsPool(poolName: string): Promise<boolean> {
try {
const result = await execa('zpool', ['list', '-H', '-o', 'name'], { timeout: 5000 });
const pools = result.stdout.split('\n').filter((line) => line.trim());
return pools.includes(poolName);
} catch (error) {
this.logger.warn(`Failed to check ZFS pool existence: ${error}`);
return false;
}
}
async validateZfsDataset(poolName: string, datasetName: string): Promise<boolean> {
try {
const fullDatasetName = `${poolName}/${datasetName}`;
const result = await execa('zfs', ['list', '-H', '-o', 'name', fullDatasetName], {
timeout: 5000,
});
return result.stdout.trim() === fullDatasetName;
} catch (error) {
this.logger.warn(`Failed to check ZFS dataset existence: ${error}`);
return false;
}
}
async validateScriptExists(scriptPath: string): Promise<boolean> {
try {
if (!existsSync(scriptPath)) {
return false;
}
await access(scriptPath, constants.F_OK | constants.X_OK);
return true;
} catch (error) {
this.logger.warn(`Failed to validate script: ${error}`);
return false;
}
}
async validateFlashPath(flashPath: string): Promise<boolean> {
try {
await access(flashPath, constants.F_OK | constants.R_OK);
return true;
} catch (error) {
this.logger.warn(`Failed to validate flash path: ${error}`);
return false;
}
}
private transformToValidatedConfig(dto: PreprocessConfigInput): ValidatedPreprocessConfig {
const config: ValidatedPreprocessConfig = {
type: dto.type,
timeout: dto.timeout,
cleanupOnFailure: dto.cleanupOnFailure,
};
if (dto.type !== PreprocessType.NONE) {
config.config = {};
if (dto.zfsConfig) {
config.config.zfs = dto.zfsConfig;
}
if (dto.flashConfig) {
config.config.flash = dto.flashConfig;
}
if (dto.scriptConfig) {
config.config.script = dto.scriptConfig;
}
}
return config;
}
async validatePreprocessingCapabilities(): Promise<{
zfsAvailable: boolean;
flashAvailable: boolean;
scriptingAvailable: boolean;
}> {
const [zfsAvailable, flashAvailable] = await Promise.all([
this.checkZfsAvailability(),
this.validateFlashPath('/boot'),
]);
return {
zfsAvailable,
flashAvailable,
scriptingAvailable: true,
};
}
private async checkZfsAvailability(): Promise<boolean> {
try {
await execa('which', ['zfs'], { timeout: 2000 });
await execa('which', ['zpool'], { timeout: 2000 });
return true;
} catch (error) {
return false;
}
}
}

View File

@@ -0,0 +1,38 @@
import { forwardRef, Module } from '@nestjs/common';
import { FlashPreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/flash-preprocessing.service.js';
import { FlashValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/flash-validation.service.js';
import { PreprocessConfigValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing-validation.service.js';
import { PreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.service.js';
import { ScriptPreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/script-preprocessing.service.js';
import { StreamingJobManager } from '@app/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.js';
import { ZfsPreprocessingService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/zfs-preprocessing.service.js';
import { ZfsValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/zfs-validation.service.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
import { RCloneModule } from '@app/unraid-api/graph/resolvers/rclone/rclone.module.js';
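// Bundles every preprocessing service; RCloneModule is imported with
// forwardRef to tolerate the circular dependency between the two modules.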
@Module({
imports: [forwardRef(() => RCloneModule)],
providers: [
RCloneApiService,
PreprocessingService,
PreprocessConfigValidationService,
StreamingJobManager,
ZfsPreprocessingService,
FlashPreprocessingService,
ScriptPreprocessingService,
ZfsValidationService,
FlashValidationService,
],
exports: [
PreprocessingService,
PreprocessConfigValidationService,
StreamingJobManager,
ZfsPreprocessingService,
FlashPreprocessingService,
ScriptPreprocessingService,
ZfsValidationService,
FlashValidationService,
],
})
export class PreprocessingModule {}

View File

@@ -0,0 +1,441 @@
import { BadRequestException, Injectable, Logger } from '@nestjs/common';
import { EventEmitter } from 'events';
import { existsSync } from 'fs';
import { unlink } from 'fs/promises';
import { v4 as uuidv4 } from 'uuid';
import {
PreprocessConfigValidationService,
ValidatedPreprocessConfig,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing-validation.service.js';
import {
PreprocessConfigInput,
PreprocessResult,
PreprocessType,
StreamingJobInfo,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
import {
StreamingJobManager,
StreamingJobOptions,
StreamingJobResult,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.js';
export interface PreprocessingOptions {
jobId?: string;
onProgress?: (progress: number) => void;
onOutput?: (data: string) => void;
onError?: (error: string) => void;
}
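/**
 * Orchestrates preprocessing by type (ZFS snapshot, flash git prep, user
 * script), tracks results per job, re-emits streaming-job events, and
 * drives cleanup on failure or cancellation.
 */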
@Injectable()
export class PreprocessingService extends EventEmitter {
private readonly logger = new Logger(PreprocessingService.name);
private readonly activePreprocessJobs = new Map<string, PreprocessResult>();
constructor(
private readonly validationService: PreprocessConfigValidationService,
private readonly streamingJobManager: StreamingJobManager
) {
super();
this.setupEventListeners();
}
async executePreprocessing(
config: PreprocessConfigInput,
options: PreprocessingOptions = {}
): Promise<PreprocessResult> {
const jobId = options.jobId || uuidv4();
try {
this.logger.log(`Starting preprocessing job ${jobId} with type: ${config.type}`);
const validatedConfig = await this.validationService.validateAndTransform(config);
if (validatedConfig.type === PreprocessType.NONE) {
return this.createSuccessResult(jobId, { type: 'none' });
}
const result = await this.executePreprocessingByType(validatedConfig, jobId, options);
this.activePreprocessJobs.set(jobId, result);
this.emit('preprocessingCompleted', { jobId, result });
return result;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
this.logger.error(`Preprocessing job ${jobId} failed: ${errorMessage}`, error);
const result = this.createErrorResult(jobId, errorMessage);
this.activePreprocessJobs.set(jobId, result);
this.emit('preprocessingFailed', { jobId, result, error });
if (config.cleanupOnFailure) {
await this.cleanup(jobId);
}
return result;
}
}
private async executePreprocessingByType(
config: ValidatedPreprocessConfig,
jobId: string,
options: PreprocessingOptions
): Promise<PreprocessResult> {
switch (config.type) {
case PreprocessType.ZFS:
return this.executeZfsPreprocessing(config, jobId, options);
case PreprocessType.FLASH:
return this.executeFlashPreprocessing(config, jobId, options);
case PreprocessType.SCRIPT:
return this.executeScriptPreprocessing(config, jobId, options);
default:
throw new BadRequestException(`Unsupported preprocessing type: ${config.type}`);
}
}
private async executeZfsPreprocessing(
config: ValidatedPreprocessConfig,
jobId: string,
options: PreprocessingOptions
): Promise<PreprocessResult> {
const zfsConfig = config.config?.zfs;
if (!zfsConfig) {
throw new BadRequestException('ZFS configuration is required');
}
const snapshotName = `${zfsConfig.snapshotPrefix || 'backup'}-${Date.now()}`;
const datasetPath = `${zfsConfig.poolName}/${zfsConfig.datasetName}`;
const fullSnapshotName = `${datasetPath}@${snapshotName}`;
try {
const createSnapshotOptions: StreamingJobOptions = {
command: 'zfs',
args: ['snapshot', fullSnapshotName],
timeout: config.timeout * 1000,
onProgress: options.onProgress,
onOutput: options.onOutput,
onError: options.onError,
};
const { promise: snapshotPromise } = await this.streamingJobManager.startStreamingJob(
PreprocessType.ZFS,
createSnapshotOptions
);
const snapshotResult = await snapshotPromise;
if (!snapshotResult.success) {
throw new Error(`Failed to create ZFS snapshot: ${snapshotResult.error}`);
}
this.logger.log(`Created ZFS snapshot: ${fullSnapshotName}`);
return this.createSuccessResult(jobId, {
type: 'zfs',
snapshotName: fullSnapshotName,
streamPath: fullSnapshotName,
cleanupRequired: zfsConfig.cleanupSnapshots,
metadata: {
poolName: zfsConfig.poolName,
datasetName: zfsConfig.datasetName,
snapshotPrefix: zfsConfig.snapshotPrefix,
retainSnapshots: zfsConfig.retainSnapshots,
},
});
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown ZFS error';
this.logger.error(`ZFS preprocessing failed for job ${jobId}: ${errorMessage}`);
if (config.cleanupOnFailure) {
await this.cleanupZfsSnapshot(fullSnapshotName);
}
throw error;
}
}
private async executeFlashPreprocessing(
config: ValidatedPreprocessConfig,
jobId: string,
options: PreprocessingOptions
): Promise<PreprocessResult> {
const flashConfig = config.config?.flash;
if (!flashConfig) {
throw new BadRequestException('Flash configuration is required');
}
const gitRepoPath = `${flashConfig.flashPath}/.git`;
// Record whether a repo already existed before we (possibly) initialize one,
// so the result metadata can report it accurately.
const gitRepoExisted = existsSync(gitRepoPath);
try {
if (flashConfig.includeGitHistory && !gitRepoExisted) {
const initOptions: StreamingJobOptions = {
command: 'git',
args: ['init'],
cwd: flashConfig.flashPath,
timeout: config.timeout * 1000,
onProgress: options.onProgress,
onOutput: options.onOutput,
onError: options.onError,
};
const { promise: initPromise } = await this.streamingJobManager.startStreamingJob(
PreprocessType.FLASH,
initOptions
);
const initResult = await initPromise;
if (!initResult.success) {
throw new Error(`Failed to initialize git repository: ${initResult.error}`);
}
const addOptions: StreamingJobOptions = {
command: 'git',
args: ['add', '.'],
cwd: flashConfig.flashPath,
timeout: config.timeout * 1000,
onProgress: options.onProgress,
onOutput: options.onOutput,
onError: options.onError,
};
const { promise: addPromise } = await this.streamingJobManager.startStreamingJob(
PreprocessType.FLASH,
addOptions
);
const addResult = await addPromise;
if (!addResult.success) {
this.logger.warn(`Git add failed, continuing: ${addResult.error}`);
}
const commitOptions: StreamingJobOptions = {
command: 'git',
args: ['commit', '-m', `Backup snapshot ${new Date().toISOString()}`],
cwd: flashConfig.flashPath,
timeout: config.timeout * 1000,
onProgress: options.onProgress,
onOutput: options.onOutput,
onError: options.onError,
};
const { promise: commitPromise } = await this.streamingJobManager.startStreamingJob(
PreprocessType.FLASH,
commitOptions
);
const commitResult = await commitPromise;
if (!commitResult.success) {
this.logger.warn(`Git commit failed, continuing: ${commitResult.error}`);
}
}
this.logger.log(`Flash preprocessing completed for job ${jobId}`);
return this.createSuccessResult(jobId, {
type: 'flash',
streamPath: flashConfig.flashPath,
cleanupRequired: false,
metadata: {
flashPath: flashConfig.flashPath,
includeGitHistory: flashConfig.includeGitHistory,
additionalPaths: flashConfig.additionalPaths,
gitInitialized: flashConfig.includeGitHistory && !gitRepoExisted,
},
});
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown Flash error';
this.logger.error(`Flash preprocessing failed for job ${jobId}: ${errorMessage}`);
throw error;
}
}
private async executeScriptPreprocessing(
config: ValidatedPreprocessConfig,
jobId: string,
options: PreprocessingOptions
): Promise<PreprocessResult> {
const scriptConfig = config.config?.script;
if (!scriptConfig) {
throw new BadRequestException('Script configuration is required');
}
try {
const scriptOptions: StreamingJobOptions = {
command: scriptConfig.scriptPath,
args: scriptConfig.scriptArgs || [],
cwd: scriptConfig.workingDirectory,
env: scriptConfig.environment,
timeout: config.timeout * 1000,
onProgress: options.onProgress,
onOutput: options.onOutput,
onError: options.onError,
};
const { promise: scriptPromise } = await this.streamingJobManager.startStreamingJob(
PreprocessType.SCRIPT,
scriptOptions
);
const scriptResult = await scriptPromise;
if (!scriptResult.success) {
throw new Error(`Script execution failed: ${scriptResult.error}`);
}
this.logger.log(`Script preprocessing completed for job ${jobId}`);
return this.createSuccessResult(jobId, {
type: 'script',
outputPath: scriptConfig.outputPath,
cleanupRequired: true,
metadata: {
scriptPath: scriptConfig.scriptPath,
scriptArgs: scriptConfig.scriptArgs,
workingDirectory: scriptConfig.workingDirectory,
exitCode: scriptResult.exitCode,
duration: scriptResult.duration,
},
});
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown Script error';
this.logger.error(`Script preprocessing failed for job ${jobId}: ${errorMessage}`);
if (config.cleanupOnFailure && existsSync(scriptConfig.outputPath)) {
await this.cleanupScriptOutput(scriptConfig.outputPath);
}
throw error;
}
}
async cleanup(jobId: string): Promise<void> {
const result = this.activePreprocessJobs.get(jobId);
if (!result) {
this.logger.warn(`No preprocessing result found for cleanup of job ${jobId}`);
return;
}
try {
if (result.cleanupRequired) {
switch (result.metadata?.type) {
case 'zfs':
if (result.snapshotName) {
await this.cleanupZfsSnapshot(result.snapshotName);
}
break;
case 'script':
if (result.outputPath) {
await this.cleanupScriptOutput(result.outputPath);
}
break;
case 'flash':
break;
}
}
this.activePreprocessJobs.delete(jobId);
this.logger.log(`Cleanup completed for preprocessing job ${jobId}`);
} catch (error) {
this.logger.error(`Cleanup failed for job ${jobId}:`, error);
}
}
private async cleanupZfsSnapshot(snapshotName: string): Promise<void> {
try {
const { promise } = await this.streamingJobManager.startStreamingJob(PreprocessType.ZFS, {
command: 'zfs',
args: ['destroy', snapshotName],
timeout: 30000,
});
const result = await promise;
if (result.success) {
this.logger.log(`Cleaned up ZFS snapshot: ${snapshotName}`);
} else {
this.logger.error(`Failed to cleanup ZFS snapshot ${snapshotName}: ${result.error}`);
}
} catch (error) {
this.logger.error(`Error during ZFS snapshot cleanup: ${error}`);
}
}
private async cleanupScriptOutput(outputPath: string): Promise<void> {
try {
if (existsSync(outputPath)) {
await unlink(outputPath);
this.logger.log(`Cleaned up script output file: ${outputPath}`);
}
} catch (error) {
this.logger.error(`Failed to cleanup script output ${outputPath}: ${error}`);
}
}
private createSuccessResult(_jobId: string, metadata: Record<string, unknown>): PreprocessResult {
return {
success: true,
// Spread known fields (snapshotName, outputPath, cleanupRequired) to the
// top level and keep the full object so cleanup() can dispatch on metadata.type.
...metadata,
metadata,
};
}
private createErrorResult(_jobId: string, error: string): PreprocessResult {
return {
success: false,
error,
cleanupRequired: false,
};
}
private setupEventListeners(): void {
this.streamingJobManager.on('jobStarted', (jobInfo: StreamingJobInfo) => {
this.emit('streamingJobStarted', jobInfo);
});
this.streamingJobManager.on(
'jobProgress',
({ jobId, progress }: { jobId: string; progress: number }) => {
this.emit('preprocessingProgress', { jobId, progress });
}
);
this.streamingJobManager.on(
'jobCompleted',
({ jobInfo, result }: { jobInfo: StreamingJobInfo; result: StreamingJobResult }) => {
this.emit('streamingJobCompleted', { jobInfo, result });
}
);
}
getActiveJobs(): Map<string, PreprocessResult> {
return new Map(this.activePreprocessJobs);
}
getJobResult(jobId: string): PreprocessResult | undefined {
return this.activePreprocessJobs.get(jobId);
}
async cancelJob(jobId: string): Promise<boolean> {
const cancelled = this.streamingJobManager.cancelJob(jobId);
if (cancelled) {
await this.cleanup(jobId);
}
return cancelled;
}
async cleanupAllJobs(): Promise<void> {
const jobIds = Array.from(this.activePreprocessJobs.keys());
await Promise.all(jobIds.map((jobId) => this.cleanup(jobId)));
await this.streamingJobManager.cleanupAllJobs();
}
}

View File

@@ -0,0 +1,229 @@
import { Field, InputType, ObjectType, registerEnumType } from '@nestjs/graphql';
import { Type } from 'class-transformer';
import {
IsBoolean,
IsEnum,
IsNotEmpty,
IsNumber,
IsOptional,
IsString,
Min,
ValidateIf,
ValidateNested,
} from 'class-validator';
import { GraphQLJSON } from 'graphql-scalars';
export enum PreprocessType {
NONE = 'none',
ZFS = 'zfs',
FLASH = 'flash',
SCRIPT = 'script',
}
registerEnumType(PreprocessType, {
name: 'PreprocessType',
description: 'Type of preprocessing to perform before backup',
});
@InputType()
export class ZfsPreprocessConfigInput {
@Field(() => String, { description: 'ZFS pool name' })
@IsString()
@IsNotEmpty()
poolName!: string;
@Field(() => String, { description: 'Dataset name within the pool' })
@IsString()
@IsNotEmpty()
datasetName!: string;
@Field(() => String, { description: 'Snapshot name prefix', nullable: true })
@IsOptional()
@IsString()
snapshotPrefix?: string;
@Field(() => Boolean, {
description: 'Whether to cleanup snapshots after backup',
defaultValue: true,
})
@IsBoolean()
cleanupSnapshots!: boolean;
@Field(() => Number, { description: 'Number of snapshots to retain', nullable: true })
@IsOptional()
@IsNumber()
@Min(1)
retainSnapshots?: number;
}
@ObjectType()
export class ZfsPreprocessConfig {
@Field(() => String)
poolName!: string;
@Field(() => String)
datasetName!: string;
@Field(() => String, { nullable: true })
snapshotPrefix?: string;
@Field(() => Boolean)
cleanupSnapshots!: boolean;
@Field(() => Number, { nullable: true })
retainSnapshots?: number;
}
@InputType()
export class FlashPreprocessConfigInput {
@Field(() => String, { description: 'Flash drive mount path', defaultValue: '/boot' })
@IsString()
@IsNotEmpty()
flashPath!: string;
@Field(() => Boolean, { description: 'Whether to include git history', defaultValue: true })
@IsBoolean()
includeGitHistory!: boolean;
@Field(() => [String], { description: 'Additional paths to include in backup', nullable: true })
@IsOptional()
additionalPaths?: string[];
}
@ObjectType()
export class FlashPreprocessConfig {
@Field(() => String)
flashPath!: string;
@Field(() => Boolean)
includeGitHistory!: boolean;
@Field(() => [String], { nullable: true })
additionalPaths?: string[];
}
@InputType()
export class ScriptPreprocessConfigInput {
@Field(() => String, { description: 'Path to the script file' })
@IsString()
@IsNotEmpty()
scriptPath!: string;
@Field(() => [String], { description: 'Arguments to pass to the script', nullable: true })
@IsOptional()
scriptArgs?: string[];
@Field(() => String, { description: 'Working directory for script execution', nullable: true })
@IsOptional()
@IsString()
workingDirectory?: string;
@Field(() => GraphQLJSON, {
description: 'Environment variables for script execution',
nullable: true,
})
@IsOptional()
environment?: Record<string, string>;
@Field(() => String, { description: 'Output file path where script should write data' })
@IsString()
@IsNotEmpty()
outputPath!: string;
}
@ObjectType()
export class ScriptPreprocessConfig {
@Field(() => String)
scriptPath!: string;
@Field(() => [String], { nullable: true })
scriptArgs?: string[];
@Field(() => String, { nullable: true })
workingDirectory?: string;
@Field(() => GraphQLJSON, { nullable: true })
environment?: Record<string, string>;
@Field(() => String)
outputPath!: string;
}
@InputType()
export class PreprocessConfigInput {
@Field(() => PreprocessType, { description: 'Type of preprocessing to perform' })
@IsEnum(PreprocessType)
type!: PreprocessType;
@Field(() => ZfsPreprocessConfigInput, { nullable: true })
@IsOptional()
@ValidateIf((o) => o.type === PreprocessType.ZFS)
@ValidateNested()
@Type(() => ZfsPreprocessConfigInput)
zfsConfig?: ZfsPreprocessConfigInput;
@Field(() => FlashPreprocessConfigInput, { nullable: true })
@IsOptional()
@ValidateIf((o) => o.type === PreprocessType.FLASH)
@ValidateNested()
@Type(() => FlashPreprocessConfigInput)
flashConfig?: FlashPreprocessConfigInput;
@Field(() => ScriptPreprocessConfigInput, { nullable: true })
@IsOptional()
@ValidateIf((o) => o.type === PreprocessType.SCRIPT)
@ValidateNested()
@Type(() => ScriptPreprocessConfigInput)
scriptConfig?: ScriptPreprocessConfigInput;
@Field(() => Number, { description: 'Timeout for preprocessing in seconds', defaultValue: 3600 })
@IsNumber()
@Min(1)
timeout!: number;
@Field(() => Boolean, { description: 'Whether to cleanup on failure', defaultValue: true })
@IsBoolean()
cleanupOnFailure!: boolean;
}
@ObjectType()
export class PreprocessConfig {
@Field(() => PreprocessType)
type!: PreprocessType;
@Field(() => ZfsPreprocessConfig, { nullable: true })
zfsConfig?: ZfsPreprocessConfig;
@Field(() => FlashPreprocessConfig, { nullable: true })
flashConfig?: FlashPreprocessConfig;
@Field(() => ScriptPreprocessConfig, { nullable: true })
scriptConfig?: ScriptPreprocessConfig;
@Field(() => Number)
timeout!: number;
@Field(() => Boolean)
cleanupOnFailure!: boolean;
}
export interface PreprocessResult {
success: boolean;
streamPath?: string;
outputPath?: string;
snapshotName?: string;
error?: string;
cleanupRequired?: boolean;
metadata?: Record<string, unknown>;
}
export interface StreamingJobInfo {
jobId: string;
processId: number;
startTime: Date;
type: PreprocessType;
status: 'running' | 'completed' | 'failed' | 'cancelled';
progress?: number;
error?: string;
}

View File

@@ -0,0 +1,248 @@
import { Injectable, Logger } from '@nestjs/common';
import { promises as fs } from 'fs';
import { dirname, join } from 'path';
import { execa } from 'execa';
import {
PreprocessResult,
ScriptPreprocessConfig,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
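/**
 * Runs user-supplied scripts with a hard timeout and reduced CPU/IO
 * priority as the `nobody` user, then verifies the declared output file
 * exists and stays within the size limit.
 */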
@Injectable()
export class ScriptPreprocessingService {
private readonly logger = new Logger(ScriptPreprocessingService.name);
private readonly tempDir = '/tmp/unraid-script-preprocessing';
private readonly maxOutputSize = 100 * 1024 * 1024; // 100MB limit
async executeScript(config: ScriptPreprocessConfig): Promise<PreprocessResult> {
const startTime = Date.now();
try {
await this.ensureTempDirectory();
const { command, args } = this.buildCommand(config);
this.logger.log(`Executing script: ${command} ${args.join(' ')}`);
await this.runScriptWithTimeout(command, args, 3600); // 1 hour default, mirroring the outer `timeout 3600s` wrapper in buildCommand()
const outputSize = await this.getFileSize(config.outputPath);
if (outputSize === 0) {
throw new Error('Script produced no output');
}
if (outputSize > this.maxOutputSize) {
throw new Error(
`Script output too large: ${outputSize} bytes (max: ${this.maxOutputSize})`
);
}
const duration = Date.now() - startTime;
this.logger.log(
`Script completed successfully in ${duration}ms, output size: ${outputSize} bytes`
);
return {
success: true,
outputPath: config.outputPath,
metadata: {
scriptPath: config.scriptPath,
duration,
outputSize,
workingDirectory: config.workingDirectory,
scriptArgs: config.scriptArgs,
},
};
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Script preprocessing failed: ${errorMessage}`);
// Cleanup output file on failure
try {
await fs.unlink(config.outputPath);
} catch {
// Ignore cleanup errors
}
return {
success: false,
error: errorMessage,
metadata: {
scriptPath: config.scriptPath,
duration: Date.now() - startTime,
workingDirectory: config.workingDirectory,
scriptArgs: config.scriptArgs,
},
};
}
}
private async ensureTempDirectory(): Promise<void> {
try {
await fs.access(this.tempDir);
} catch {
await fs.mkdir(this.tempDir, { recursive: true, mode: 0o700 });
}
}
private buildCommand(config: ScriptPreprocessConfig): { command: string; args: string[] } {
// Not a full sandbox: wrap the script in a hard timeout with reduced
// CPU/IO priority; the privilege drop to `nobody` happens in runScriptWithTimeout.
const command = 'timeout';
const args = [
'3600s', // 1 hour timeout
'nice',
'-n',
'10',
'ionice',
'-c',
'3',
'bash',
'-c',
// Quote each argument individually to avoid word splitting inside `bash -c`.
`cd "${config.workingDirectory || '/tmp'}" && exec "${config.scriptPath}" ${(config.scriptArgs || []).map((arg) => `"${arg}"`).join(' ')}`,
];
];
return { command, args };
}
private async runScriptWithTimeout(
command: string,
args: string[],
timeoutSeconds: number
): Promise<void> {
try {
await execa(command, args, {
timeout: timeoutSeconds * 1000,
stdio: ['ignore', 'pipe', 'pipe'],
env: {
...process.env,
PATH: '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
},
uid: 99, // nobody user
gid: 99, // nobody group
});
} catch (error: any) {
if (error.timedOut) {
throw new Error(`Script timeout after ${timeoutSeconds} seconds`);
}
if (error.signal) {
throw new Error(`Script killed by signal: ${error.signal}`);
}
if (error.exitCode !== undefined && error.exitCode !== 0) {
throw new Error(
`Script exited with code ${error.exitCode}. stderr: ${error.stderr || ''}`
);
}
throw new Error(`Failed to execute script: ${error.message}`);
}
}
private async getFileSize(filePath: string): Promise<number> {
try {
const stats = await fs.stat(filePath);
return stats.size;
} catch {
return 0;
}
}
async validateScript(config: ScriptPreprocessConfig): Promise<{ valid: boolean; error?: string }> {
try {
// Check if script exists and is executable
await fs.access(config.scriptPath, fs.constants.F_OK | fs.constants.X_OK);
// Check script is not in restricted locations
const restrictedPaths = ['/boot', '/mnt/user', '/mnt/disk'];
const isRestricted = restrictedPaths.some((path) => config.scriptPath.startsWith(path));
if (isRestricted) {
return {
valid: false,
error: 'Script cannot be located in restricted paths (/boot, /mnt/user, /mnt/disk*)',
};
}
// Validate working directory if specified
if (config.workingDirectory) {
try {
await fs.access(config.workingDirectory, fs.constants.F_OK);
} catch {
return {
valid: false,
error: `Working directory does not exist: ${config.workingDirectory}`,
};
}
}
// Validate output path directory exists
const outputDir = dirname(config.outputPath);
try {
await fs.access(outputDir, fs.constants.F_OK | fs.constants.W_OK);
} catch {
return {
valid: false,
error: `Output directory does not exist or is not writable: ${outputDir}`,
};
}
// Validate script arguments
if (config.scriptArgs) {
for (const arg of config.scriptArgs) {
if (arg.length > 1000) {
return {
valid: false,
error: `Script argument too long (max 1000 characters): ${arg.substring(0, 50)}...`,
};
}
}
}
return { valid: true };
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
return {
valid: false,
error: `Script validation failed: ${errorMessage}`,
};
}
}
async cleanup(outputPath: string): Promise<void> {
await this.cleanupFile(outputPath);
}
private async cleanupFile(filePath: string): Promise<void> {
try {
await fs.unlink(filePath);
this.logger.debug(`Cleaned up file: ${filePath}`);
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.warn(`Failed to cleanup file ${filePath}: ${errorMessage}`);
}
}
async cleanupTempDirectory(): Promise<void> {
try {
const files = await fs.readdir(this.tempDir);
const now = Date.now();
const maxAge = 24 * 60 * 60 * 1000; // 24 hours
for (const file of files) {
const filePath = join(this.tempDir, file);
try {
const stats = await fs.stat(filePath);
if (now - stats.mtime.getTime() > maxAge) {
await fs.unlink(filePath);
this.logger.debug(`Cleaned up old temp file: ${filePath}`);
}
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.warn(`Failed to cleanup old temp file ${filePath}: ${errorMessage}`);
}
}
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.warn(`Failed to cleanup temp directory: ${errorMessage}`);
}
}
}

View File

@@ -0,0 +1,285 @@
import { Injectable, Logger } from '@nestjs/common';
import { access, stat } from 'fs/promises';
import { dirname, isAbsolute, resolve } from 'path';
import { ScriptPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
export interface ScriptValidationResult {
isValid: boolean;
errors: string[];
warnings: string[];
metadata: {
scriptExists?: boolean;
scriptExecutable?: boolean;
workingDirectoryExists?: boolean;
outputDirectoryExists?: boolean;
outputDirectoryWritable?: boolean;
environmentVariablesValid?: boolean;
resolvedScriptPath?: string;
resolvedWorkingDirectory?: string;
resolvedOutputPath?: string;
};
}
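/**
 * Static validation for script preprocessing: resolves paths, checks
 * existence, executability, and writability, vets environment variables,
 * and flags risky paths and arguments.
 */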
@Injectable()
export class ScriptValidationService {
private readonly logger = new Logger(ScriptValidationService.name);
async validateScriptConfig(config: ScriptPreprocessConfigInput): Promise<ScriptValidationResult> {
const result: ScriptValidationResult = {
isValid: true,
errors: [],
warnings: [],
metadata: {},
};
try {
// Resolve and validate script path
const resolvedScriptPath = this.resolveScriptPath(
config.scriptPath,
config.workingDirectory
);
result.metadata.resolvedScriptPath = resolvedScriptPath;
const scriptExists = await this.validateScriptExists(resolvedScriptPath);
result.metadata.scriptExists = scriptExists;
if (!scriptExists) {
result.errors.push(`Script file '${resolvedScriptPath}' does not exist`);
result.isValid = false;
return result;
}
// Check if script is executable
const scriptExecutable = await this.validateScriptExecutable(resolvedScriptPath);
result.metadata.scriptExecutable = scriptExecutable;
if (!scriptExecutable) {
result.warnings.push(`Script file '${resolvedScriptPath}' may not be executable`);
}
// Validate working directory
if (config.workingDirectory) {
const resolvedWorkingDir = resolve(config.workingDirectory);
result.metadata.resolvedWorkingDirectory = resolvedWorkingDir;
const workingDirExists = await this.validateDirectory(resolvedWorkingDir);
result.metadata.workingDirectoryExists = workingDirExists;
if (!workingDirExists) {
result.errors.push(`Working directory '${resolvedWorkingDir}' does not exist`);
result.isValid = false;
}
}
// Validate output path and directory
const resolvedOutputPath = this.resolveOutputPath(
config.outputPath,
config.workingDirectory
);
result.metadata.resolvedOutputPath = resolvedOutputPath;
const outputDirectory = dirname(resolvedOutputPath);
const outputDirExists = await this.validateDirectory(outputDirectory);
result.metadata.outputDirectoryExists = outputDirExists;
if (!outputDirExists) {
result.errors.push(`Output directory '${outputDirectory}' does not exist`);
result.isValid = false;
} else {
// Check if output directory is writable
const outputDirWritable = await this.validateDirectoryWritable(outputDirectory);
result.metadata.outputDirectoryWritable = outputDirWritable;
if (!outputDirWritable) {
result.errors.push(`Output directory '${outputDirectory}' is not writable`);
result.isValid = false;
}
}
// Validate environment variables
if (config.environment) {
const envValid = this.validateEnvironmentVariables(config.environment);
result.metadata.environmentVariablesValid = envValid;
if (!envValid) {
result.warnings.push('Some environment variables may contain invalid values');
}
}
// Security validations
this.performSecurityValidations(config, result);
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
result.errors.push(`Validation failed: ${errorMessage}`);
result.isValid = false;
}
return result;
}
private resolveScriptPath(scriptPath: string, workingDirectory?: string): string {
if (isAbsolute(scriptPath)) {
return scriptPath;
}
const baseDir = workingDirectory || process.cwd();
return resolve(baseDir, scriptPath);
}
private resolveOutputPath(outputPath: string, workingDirectory?: string): string {
if (isAbsolute(outputPath)) {
return outputPath;
}
const baseDir = workingDirectory || process.cwd();
return resolve(baseDir, outputPath);
}
async validateScriptExists(scriptPath: string): Promise<boolean> {
try {
await access(scriptPath);
const stats = await stat(scriptPath);
return stats.isFile();
} catch {
return false;
}
}
async validateScriptExecutable(scriptPath: string): Promise<boolean> {
try {
const stats = await stat(scriptPath);
// Check if file has execute permissions (basic check)
return (stats.mode & 0o111) !== 0;
} catch {
return false;
}
}
async validateDirectory(dirPath: string): Promise<boolean> {
try {
await access(dirPath);
const stats = await stat(dirPath);
return stats.isDirectory();
} catch {
return false;
}
}
async validateDirectoryWritable(dirPath: string): Promise<boolean> {
try {
const stats = await stat(dirPath);
// Check if directory has write permissions (basic check)
return (stats.mode & 0o200) !== 0;
} catch {
return false;
}
}
validateEnvironmentVariables(environment: Record<string, string>): boolean {
try {
// Check for potentially dangerous environment variables
const dangerousVars = ['PATH', 'LD_LIBRARY_PATH', 'HOME', 'USER'];
const hasDangerousVars = Object.keys(environment).some((key) =>
dangerousVars.includes(key.toUpperCase())
);
if (hasDangerousVars) {
this.logger.warn('Script environment contains potentially dangerous variables');
}
// Check for valid variable names (basic validation)
const validVarName = /^[A-Za-z_][A-Za-z0-9_]*$/;
const invalidVars = Object.keys(environment).filter((key) => !validVarName.test(key));
if (invalidVars.length > 0) {
this.logger.warn(`Invalid environment variable names: ${invalidVars.join(', ')}`);
return false;
}
return true;
} catch {
return false;
}
}
private performSecurityValidations(
config: ScriptPreprocessConfigInput,
result: ScriptValidationResult
): void {
// Check for potentially dangerous script paths
const dangerousPaths = ['/bin', '/usr/bin', '/sbin', '/usr/sbin'];
const scriptInDangerousPath = dangerousPaths.some((path) =>
result.metadata.resolvedScriptPath?.startsWith(path)
);
if (scriptInDangerousPath) {
result.warnings.push(
'Script is located in a system directory. Ensure it is safe to execute.'
);
}
// Check for dangerous script arguments
if (config.scriptArgs) {
const dangerousArgs = config.scriptArgs.filter(
(arg) =>
arg.includes('..') ||
arg.includes('rm ') ||
arg.includes('sudo ') ||
arg.includes('su ')
);
if (dangerousArgs.length > 0) {
result.warnings.push(
'Script arguments contain potentially dangerous commands or paths.'
);
}
}
// Check if output path is in a safe location
if (result.metadata.resolvedOutputPath) {
const systemPaths = ['/bin', '/usr', '/etc', '/var', '/sys', '/proc'];
const outputInSystemPath = systemPaths.some((path) =>
result.metadata.resolvedOutputPath?.startsWith(path)
);
if (outputInSystemPath) {
result.errors.push('Output path cannot be in system directories for security reasons.');
result.isValid = false;
}
}
// Validate script file extension for common script types
if (result.metadata.resolvedScriptPath) {
const scriptExt = result.metadata.resolvedScriptPath.split('.').pop()?.toLowerCase();
const allowedExtensions = ['sh', 'bash', 'py', 'pl', 'rb', 'js', 'php'];
if (scriptExt && !allowedExtensions.includes(scriptExt)) {
result.warnings.push(
`Script extension '.${scriptExt}' is not commonly recognized. Ensure it is executable.`
);
}
}
}
async getScriptInfo(scriptPath: string): Promise<{
size: number | null;
lastModified: Date | null;
permissions: string | null;
}> {
try {
const stats = await stat(scriptPath);
return {
size: stats.size,
lastModified: stats.mtime,
permissions: '0' + (stats.mode & 0o777).toString(8),
};
} catch {
return {
size: null,
lastModified: null,
permissions: null,
};
}
}
}

View File

@@ -0,0 +1,279 @@
import { Injectable, Logger } from '@nestjs/common';
import { EventEmitter } from 'events';
import { execa } from 'execa';
import { v4 as uuidv4 } from 'uuid';
import {
PreprocessType,
StreamingJobInfo,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
export interface StreamingJobOptions {
command: string;
args: string[];
cwd?: string;
env?: Record<string, string>;
timeout?: number;
onProgress?: (progress: number) => void;
onOutput?: (data: string) => void;
onError?: (error: string) => void;
}
export interface StreamingJobResult {
success: boolean;
exitCode?: number;
signal?: string;
error?: string;
output?: string;
duration: number;
}
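/**
 * Subprocess lifecycle manager built on execa: tracks active jobs,
 * extracts progress from process output, and supports cancellation and
 * bulk cleanup.
 */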
@Injectable()
export class StreamingJobManager extends EventEmitter {
private readonly logger = new Logger(StreamingJobManager.name);
private readonly activeJobs = new Map<string, StreamingJobInfo>();
private readonly processes = new Map<string, ReturnType<typeof execa>>();
async startStreamingJob(
type: PreprocessType,
options: StreamingJobOptions
): Promise<{ jobId: string; promise: Promise<StreamingJobResult> }> {
const jobId = uuidv4();
const startTime = new Date();
const jobInfo: StreamingJobInfo = {
jobId,
processId: 0,
startTime,
type,
status: 'running',
progress: 0,
};
this.activeJobs.set(jobId, jobInfo);
const promise = this.executeStreamingJob(jobId, options);
this.logger.log(`Started streaming job ${jobId} for ${type}`);
this.emit('jobStarted', jobInfo);
return { jobId, promise };
}
private async executeStreamingJob(
jobId: string,
options: StreamingJobOptions
): Promise<StreamingJobResult> {
const startTime = Date.now();
let timeoutHandle: NodeJS.Timeout | undefined;
return new Promise((resolve) => {
const jobInfo = this.activeJobs.get(jobId);
if (!jobInfo) {
resolve({
success: false,
error: 'Job not found',
duration: 0,
});
return;
}
const childProcess = execa(options.command, options.args, {
cwd: options.cwd,
env: { ...process.env, ...options.env },
stdio: ['pipe', 'pipe', 'pipe'],
timeout: options.timeout,
killSignal: 'SIGTERM',
});
jobInfo.processId = childProcess.pid || 0;
this.processes.set(jobId, childProcess);
let output = '';
let errorOutput = '';
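// execa already enforces `timeout`; this timer additionally marks the job
// cancelled via cancelJob() so job-state bookkeeping stays consistent.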
if (options.timeout) {
timeoutHandle = setTimeout(() => {
this.logger.warn(`Streaming job ${jobId} timed out after ${options.timeout}ms`);
this.cancelJob(jobId);
}, options.timeout);
}
childProcess.stdout?.on('data', (data: Buffer) => {
const chunk = data.toString();
output += chunk;
if (options.onOutput) {
options.onOutput(chunk);
}
this.extractProgress(jobId, chunk, options.onProgress);
});
childProcess.stderr?.on('data', (data: Buffer) => {
const chunk = data.toString();
errorOutput += chunk;
if (options.onError) {
options.onError(chunk);
}
this.extractProgress(jobId, chunk, options.onProgress);
});
childProcess
.then((result) => {
if (timeoutHandle) {
clearTimeout(timeoutHandle);
}
const duration = Date.now() - startTime;
const success = result.exitCode === 0;
jobInfo.status = success ? 'completed' : 'failed';
if (!success) {
jobInfo.error = result.stderr || `Process exited with code ${result.exitCode}`;
}
this.cleanup(jobId);
const jobResult: StreamingJobResult = {
success,
exitCode: result.exitCode,
output: success ? result.stdout : undefined,
duration,
};
this.logger.log(
`Streaming job ${jobId} completed: ${success ? 'success' : 'failed'}`
);
this.emit('jobCompleted', { jobInfo, result: jobResult });
resolve(jobResult);
})
.catch((error) => {
if (timeoutHandle) {
clearTimeout(timeoutHandle);
}
const duration = Date.now() - startTime;
jobInfo.status = error.isCanceled ? 'cancelled' : 'failed';
jobInfo.error = error.message;
this.cleanup(jobId);
const jobResult: StreamingJobResult = {
success: false,
exitCode: error.exitCode,
signal: error.signal,
error: error.message,
duration,
};
this.logger.error(`Streaming job ${jobId} failed:`, error);
this.emit('jobCompleted', { jobInfo, result: jobResult });
resolve(jobResult);
});
});
}
private extractProgress(
jobId: string,
output: string,
onProgress?: (progress: number) => void
): void {
const jobInfo = this.activeJobs.get(jobId);
if (!jobInfo) return;
let progress = jobInfo.progress || 0;
if (jobInfo.type === PreprocessType.ZFS) {
const match = output.match(/(\d+(?:\.\d+)?)%/);
if (match) {
progress = parseFloat(match[1]);
}
} else if (jobInfo.type === PreprocessType.FLASH) {
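// Flash jobs emit no native progress; approximate from output volume.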
const lines = output.split('\n');
const totalLines = lines.length;
if (totalLines > 0) {
progress = Math.min(100, (totalLines / 1000) * 100);
}
}
if (progress !== jobInfo.progress) {
jobInfo.progress = progress;
if (onProgress) {
onProgress(progress);
}
this.emit('jobProgress', { jobId, progress });
}
}
cancelJob(jobId: string): boolean {
const childProcess = this.processes.get(jobId);
const jobInfo = this.activeJobs.get(jobId);
if (!childProcess || !jobInfo) {
return false;
}
try {
jobInfo.status = 'cancelled';
childProcess.kill('SIGTERM');
this.logger.log(`Cancelled streaming job ${jobId}`);
this.emit('jobCancelled', jobInfo);
return true;
} catch (error) {
this.logger.error(`Failed to cancel streaming job ${jobId}:`, error);
return false;
}
}
getJobInfo(jobId: string): StreamingJobInfo | undefined {
return this.activeJobs.get(jobId);
}
getAllActiveJobs(): StreamingJobInfo[] {
return Array.from(this.activeJobs.values());
}
getJobsByType(type: PreprocessType): StreamingJobInfo[] {
return Array.from(this.activeJobs.values()).filter((job) => job.type === type);
}
private cleanup(jobId: string): void {
this.processes.delete(jobId);
this.activeJobs.delete(jobId);
}
async cleanupAllJobs(): Promise<void> {
const activeJobIds = Array.from(this.activeJobs.keys());
for (const jobId of activeJobIds) {
this.cancelJob(jobId);
}
await new Promise((resolve) => setTimeout(resolve, 1000));
this.processes.clear();
this.activeJobs.clear();
this.logger.log('Cleaned up all streaming jobs');
}
isJobRunning(jobId: string): boolean {
const jobInfo = this.activeJobs.get(jobId);
return jobInfo?.status === 'running';
}
getJobCount(): number {
return this.activeJobs.size;
}
getJobCountByType(type: PreprocessType): number {
return this.getJobsByType(type).length;
}
}

View File

@@ -0,0 +1,179 @@
import { Injectable, Logger } from '@nestjs/common';
import { execa } from 'execa';
import {
PreprocessResult,
PreprocessType,
ZfsPreprocessConfigInput,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
import {
StreamingJobManager,
StreamingJobOptions,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.js';
import { ZfsValidationService } from '@app/unraid-api/graph/resolvers/backup/preprocessing/zfs-validation.service.js';
@Injectable()
export class ZfsPreprocessingService {
private readonly logger = new Logger(ZfsPreprocessingService.name);
constructor(
private readonly streamingJobManager: StreamingJobManager,
private readonly zfsValidationService: ZfsValidationService
) {}
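/**
 * Full ZFS flow: validate the config, snapshot the dataset, stream the
 * snapshot via `zfs send`, then optionally destroy it. On failure the
 * snapshot is also destroyed when cleanupSnapshots is enabled.
 */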
async executeZfsPreprocessing(
config: ZfsPreprocessConfigInput,
remotePath: string,
timeout: number = 3600000
): Promise<PreprocessResult> {
// Validate configuration first
const validationResult = await this.zfsValidationService.validateZfsConfig(config);
if (!validationResult.isValid) {
return {
success: false,
error: `ZFS configuration validation failed: ${validationResult.errors.join(', ')}`,
metadata: {
validationErrors: validationResult.errors,
validationWarnings: validationResult.warnings,
},
};
}
// Log any warnings
if (validationResult.warnings.length > 0) {
this.logger.warn(`ZFS preprocessing warnings: ${validationResult.warnings.join(', ')}`);
}
const snapshotName = this.generateSnapshotName(config.datasetName);
const fullSnapshotPath = `${config.poolName}/${config.datasetName}@${snapshotName}`;
try {
await this.createSnapshot(fullSnapshotPath);
this.logger.log(`Created ZFS snapshot: ${fullSnapshotPath}`);
const streamingResult = await this.streamSnapshot(
fullSnapshotPath,
remotePath,
config,
timeout
);
if (config.cleanupSnapshots) {
await this.cleanupSnapshot(fullSnapshotPath);
this.logger.log(`Cleaned up ZFS snapshot: ${fullSnapshotPath}`);
}
return {
success: true,
outputPath: streamingResult.remotePath,
snapshotName: fullSnapshotPath,
metadata: {
snapshotName: fullSnapshotPath,
bytesTransferred: streamingResult.bytesTransferred,
duration: streamingResult.duration,
cleanedUp: config.cleanupSnapshots,
validationWarnings: validationResult.warnings,
datasetInfo: validationResult.metadata,
},
};
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(
`ZFS preprocessing failed: ${errorMessage}`,
error instanceof Error ? error.stack : undefined
);
if (config.cleanupSnapshots) {
try {
await this.cleanupSnapshot(fullSnapshotPath);
this.logger.log(`Cleaned up ZFS snapshot after failure: ${fullSnapshotPath}`);
} catch (cleanupError: unknown) {
const cleanupErrorMessage =
cleanupError instanceof Error ? cleanupError.message : String(cleanupError);
this.logger.error(`Failed to cleanup snapshot: ${cleanupErrorMessage}`);
}
}
return {
success: false,
error: errorMessage,
snapshotName: fullSnapshotPath,
cleanupRequired: config.cleanupSnapshots,
metadata: {
snapshotName: fullSnapshotPath,
cleanupAttempted: config.cleanupSnapshots,
},
};
}
}
private async createSnapshot(snapshotPath: string): Promise<void> {
try {
await execa('zfs', ['snapshot', snapshotPath]);
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
throw new Error(`ZFS snapshot creation failed: ${errorMessage}`);
}
}
private async streamSnapshot(
snapshotPath: string,
remotePath: string,
config: ZfsPreprocessConfigInput,
timeout: number
): Promise<{ remotePath: string; bytesTransferred: number; duration: number }> {
const zfsSendArgs = ['send', snapshotPath];
const streamingOptions: StreamingJobOptions = {
command: 'zfs',
args: zfsSendArgs,
timeout,
onProgress: (progress) => {
this.logger.debug(`ZFS streaming progress: ${progress}%`);
},
onOutput: (data) => {
this.logger.debug(`ZFS send output: ${data.slice(0, 100)}...`);
},
onError: (error) => {
this.logger.error(`ZFS send error: ${error}`);
},
};
const { jobId, promise } = await this.streamingJobManager.startStreamingJob(
PreprocessType.ZFS,
streamingOptions
);
try {
const result = await promise;
if (!result.success) {
throw new Error(result.error || 'ZFS streaming failed');
}
return {
remotePath,
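// Byte accounting is not wired into the send stream yet.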
bytesTransferred: 0,
duration: result.duration,
};
} catch (error: unknown) {
this.streamingJobManager.cancelJob(jobId);
throw error;
}
}
private async cleanupSnapshot(snapshotPath: string): Promise<void> {
try {
await execa('zfs', ['destroy', snapshotPath]);
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
throw new Error(`ZFS snapshot cleanup failed: ${errorMessage}`);
}
}
private generateSnapshotName(datasetName: string): string {
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
return `backup-${datasetName}-${timestamp}`;
}
}

View File

@@ -0,0 +1,244 @@
import { Injectable, Logger } from '@nestjs/common';
import { execa } from 'execa';
import { ZfsPreprocessConfigInput } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
export interface ZfsValidationResult {
isValid: boolean;
errors: string[];
warnings: string[];
metadata: {
poolExists?: boolean;
datasetExists?: boolean;
datasetSize?: number;
availableSpace?: number;
mountpoint?: string;
};
}
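/**
 * Validates ZFS pool/dataset existence and gathers capacity and
 * mountpoint information via the `zpool`/`zfs` CLIs.
 */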
@Injectable()
export class ZfsValidationService {
private readonly logger = new Logger(ZfsValidationService.name);
async validateZfsConfig(config: ZfsPreprocessConfigInput): Promise<ZfsValidationResult> {
const result: ZfsValidationResult = {
isValid: true,
errors: [],
warnings: [],
metadata: {},
};
try {
// Validate pool exists
const poolExists = await this.validatePool(config.poolName);
result.metadata.poolExists = poolExists;
if (!poolExists) {
result.errors.push(`ZFS pool '${config.poolName}' does not exist`);
result.isValid = false;
return result;
}
// Validate dataset exists
const datasetExists = await this.validateDataset(config.poolName, config.datasetName);
result.metadata.datasetExists = datasetExists;
if (!datasetExists) {
result.errors.push(
`ZFS dataset '${config.poolName}/${config.datasetName}' does not exist`
);
result.isValid = false;
return result;
}
// Get dataset information
const datasetInfo = await this.getDatasetInfo(config.poolName, config.datasetName);
result.metadata = { ...result.metadata, ...datasetInfo };
// Validate dataset is mounted
if (!datasetInfo.mountpoint || datasetInfo.mountpoint === 'none') {
result.warnings.push(
`Dataset '${config.poolName}/${config.datasetName}' is not mounted`
);
}
// Check available space for snapshots
if (datasetInfo.availableSpace && datasetInfo.datasetSize) {
const spaceRatio = datasetInfo.availableSpace / datasetInfo.datasetSize;
if (spaceRatio < 0.1) {
result.warnings.push(
'Low available space for snapshot creation (less than 10% of dataset size)'
);
}
}
// Validate snapshot retention settings
if (config.retainSnapshots && config.retainSnapshots < 1) {
result.errors.push('Retain snapshots must be at least 1');
result.isValid = false;
}
// Check for existing snapshots if cleanup is disabled
if (!config.cleanupSnapshots) {
const existingSnapshots = await this.getExistingSnapshots(
config.poolName,
config.datasetName,
config.snapshotPrefix
);
if (existingSnapshots.length > 10) {
result.warnings.push(
`Found ${existingSnapshots.length} existing snapshots. Consider enabling cleanup.`
);
}
}
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
result.errors.push(`Validation failed: ${errorMessage}`);
result.isValid = false;
}
return result;
}
async validatePool(poolName: string): Promise<boolean> {
try {
await execa('zpool', ['list', '-H', '-o', 'name', poolName]);
return true;
} catch {
return false;
}
}
async validateDataset(poolName: string, datasetName: string): Promise<boolean> {
const fullPath = `${poolName}/${datasetName}`;
try {
await execa('zfs', ['list', '-H', '-o', 'name', fullPath]);
return true;
} catch {
return false;
}
}
async getDatasetInfo(
poolName: string,
datasetName: string
): Promise<{
datasetSize?: number;
availableSpace?: number;
mountpoint?: string;
}> {
const fullPath = `${poolName}/${datasetName}`;
const result: { datasetSize?: number; availableSpace?: number; mountpoint?: string } = {};
try {
// Get dataset size
const { stdout: sizeOutput } = await execa('zfs', [
'list',
'-H',
'-p',
'-o',
'used',
fullPath,
]);
const size = parseInt(sizeOutput.trim(), 10);
if (!isNaN(size)) {
result.datasetSize = size;
}
} catch (error: unknown) {
this.logger.warn(
`Failed to get dataset size: ${error instanceof Error ? error.message : String(error)}`
);
}
try {
// Get available space
const { stdout: availOutput } = await execa('zfs', [
'list',
'-H',
'-p',
'-o',
'avail',
fullPath,
]);
const avail = parseInt(availOutput.trim(), 10);
if (!isNaN(avail)) {
result.availableSpace = avail;
}
} catch (error: unknown) {
this.logger.warn(
`Failed to get available space: ${error instanceof Error ? error.message : String(error)}`
);
}
try {
// Get mountpoint
const { stdout: mountOutput } = await execa('zfs', [
'list',
'-H',
'-o',
'mountpoint',
fullPath,
]);
result.mountpoint = mountOutput.trim();
} catch (error: unknown) {
this.logger.warn(
`Failed to get mountpoint: ${error instanceof Error ? error.message : String(error)}`
);
}
return result;
}
async getExistingSnapshots(
poolName: string,
datasetName: string,
prefix?: string
): Promise<string[]> {
const fullPath = `${poolName}/${datasetName}`;
try {
const { stdout } = await execa('zfs', [
'list',
'-H',
'-t',
'snapshot',
'-o',
'name',
'-r',
fullPath,
]);
const snapshots = stdout.split('\n').filter((line) => line.trim());
if (prefix) {
const prefixPattern = `${fullPath}@${prefix}`;
return snapshots.filter((snapshot) => snapshot.startsWith(prefixPattern));
}
return snapshots.filter((snapshot) => snapshot.startsWith(`${fullPath}@`));
} catch {
return [];
}
}
async getPoolHealth(poolName: string): Promise<string | null> {
try {
const { stdout } = await execa('zpool', ['list', '-H', '-o', 'health', poolName]);
return stdout.trim();
} catch {
return null;
}
}
async canCreateSnapshot(poolName: string, datasetName: string): Promise<boolean> {
// Check if we have write permissions and the dataset is not readonly
const fullPath = `${poolName}/${datasetName}`;
try {
const { stdout } = await execa('zfs', ['get', '-H', '-o', 'value', 'readonly', fullPath]);
return stdout.trim() === 'off';
} catch {
return false;
}
}
}
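
A sketch of how a caller might consume the validator above (illustrative; `assertZfsConfigUsable` and its wiring are assumptions, not part of this commit):

import { Logger } from '@nestjs/common';

// Fail fast on hard errors, surface warnings, and keep the metadata for diagnostics.
async function assertZfsConfigUsable(
    validator: ZfsValidationService,
    config: ZfsPreprocessConfigInput
): Promise<void> {
    const logger = new Logger('ZfsPreflight');
    const result = await validator.validateZfsConfig(config);
    for (const warning of result.warnings) {
        logger.warn(`ZFS preprocess warning: ${warning}`);
    }
    if (!result.isValid) {
        throw new Error(`Invalid ZFS preprocess config: ${result.errors.join('; ')}`);
    }
    logger.debug(`ZFS preflight metadata: ${JSON.stringify(result.metadata)}`);
}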

View File

@@ -10,8 +10,24 @@ import got, { HTTPError } from 'got';
import pRetry from 'p-retry';
import { sanitizeParams } from '@app/core/log.js';
import {
BACKUP_JOB_GROUP_PREFIX,
getBackupJobGroupId,
getConfigIdFromGroupId,
isBackupJobGroup,
} from '@app/unraid-api/graph/resolvers/backup/backup.utils.js';
import {
PreprocessType,
StreamingJobInfo,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.types.js';
import {
StreamingJobManager,
StreamingJobOptions,
StreamingJobResult,
} from '@app/unraid-api/graph/resolvers/backup/preprocessing/streaming-job-manager.service.js';
import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js';
import {
BackupJobStatus,
CreateRCloneRemoteDto,
DeleteRCloneRemoteDto,
GetRCloneJobStatusDto,
@@ -20,7 +36,6 @@ import {
RCloneJob,
RCloneJobListResponse,
RCloneJobStats,
RCloneJobStatus,
RCloneProviderResponse,
RCloneRemoteConfig,
RCloneStartBackupInput,
@@ -51,6 +66,39 @@ interface JobOperationResult {
errors: string[];
}
export interface StreamingBackupOptions {
remoteName: string;
remotePath: string;
sourceStream?: NodeJS.ReadableStream;
sourceCommand?: string;
sourceArgs?: string[];
preprocessType?: PreprocessType;
onProgress?: (progress: number) => void;
onOutput?: (data: string) => void;
onError?: (error: string) => void;
timeout?: number;
}
export interface StreamingBackupResult {
success: boolean;
jobId?: string;
rcloneJobId?: string;
error?: string;
duration: number;
bytesTransferred?: number;
}
export interface UnifiedJobStatus {
jobId: string;
type: 'daemon' | 'streaming';
status: BackupJobStatus;
progress?: number;
stats?: RCloneJobStats;
error?: string;
startTime?: Date;
preprocessType?: PreprocessType;
}
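// Example shapes (illustrative values): a streaming job surfaces its preprocess type,
//   { jobId: 'stream-1', type: 'streaming', status: BackupJobStatus.RUNNING, progress: 42, preprocessType: PreprocessType.ZFS }
// while a daemon job carries rclone stats instead of a preprocessType.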
const CONSTANTS = {
RETRY_CONFIG: {
retries: 6,
@@ -70,8 +118,6 @@ const CONSTANTS = {
},
} as const;
const JOB_GROUP_PREFIX = 'backup-';
@Injectable()
export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
private isInitialized: boolean = false;
@@ -84,7 +130,10 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
private readonly rclonePassword: string =
process.env.RCLONE_PASSWORD || crypto.randomBytes(24).toString('base64');
constructor(private readonly statusService: RCloneStatusService) {}
constructor(
private readonly statusService: RCloneStatusService,
private readonly streamingJobManager: StreamingJobManager
) {}
async onModuleInit(): Promise<void> {
try {
@@ -132,13 +181,23 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
const rcloneArgs = this.buildRcloneArgs(socketPath, logFilePath);
this.logger.log(`Starting RClone RC daemon on socket: ${socketPath}`);
this.rcloneProcess = execa('rclone', rcloneArgs, { detached: false });
const rcloneProcessExecution = execa('rclone', rcloneArgs, { detached: false });
this.rcloneProcess = rcloneProcessExecution;
this.setupProcessListeners();
rcloneProcessExecution.catch((error) => {
this.logger.debug(
`Rclone process execution promise rejected (expected if process failed to start or exited prematurely): ${
error.shortMessage || error.message
}`
);
});
await this.waitForSocketReady();
this.logger.log('RClone RC daemon started and socket is ready.');
return true;
} catch (error: unknown) {
this.logger.error(`Error starting RClone RC daemon: ${error}`);
this.logger.error(`Error during RClone RC daemon startup sequence: ${error}`);
this.cleanupFailedProcess();
return false;
}
@@ -317,8 +376,8 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
        this.logger.log(`Starting backup: ${input.srcPath} → ${input.dstPath}`);
const group = input.configId
? `${JOB_GROUP_PREFIX}${input.configId}`
: JOB_GROUP_PREFIX + 'manual';
? getBackupJobGroupId(input.configId)
: BACKUP_JOB_GROUP_PREFIX + 'manual';
const params = {
srcFs: input.srcPath,
@@ -335,6 +394,185 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
return result;
}
async startStreamingBackup(options: StreamingBackupOptions): Promise<StreamingBackupResult> {
const startTime = Date.now();
try {
if (!options.sourceCommand || !options.sourceArgs) {
throw new Error('Source command and args are required for streaming backup');
}
const remotePath = `${options.remoteName}:${options.remotePath}`;
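            // NOTE: sourceCommand/sourceArgs are interpolated into a shell string below
            // without escaping, so callers must supply shell-safe values.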
const streamingOptions: StreamingJobOptions = {
command: 'sh',
args: [
'-c',
`${options.sourceCommand} ${options.sourceArgs.join(' ')} | rclone rcat "${remotePath}"`,
],
timeout: options.timeout,
onProgress: options.onProgress,
onOutput: options.onOutput,
onError: options.onError,
};
const { jobId, promise } = await this.streamingJobManager.startStreamingJob(
options.preprocessType || PreprocessType.NONE,
streamingOptions
);
const result = await promise;
const duration = Date.now() - startTime;
return {
success: result.success,
jobId,
error: result.error,
duration,
bytesTransferred: this.extractBytesFromOutput(result.output),
};
} catch (error: unknown) {
const duration = Date.now() - startTime;
this.logger.error(`Streaming backup failed: ${error}`);
return {
success: false,
error: error instanceof Error ? error.message : String(error),
duration,
};
}
}
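    // Illustrative call (not part of this commit), e.g. streaming a ZFS snapshot:
    //   await rcloneApiService.startStreamingBackup({
    //       remoteName: 'gdrive',
    //       remotePath: 'backups/appdata.zfs',
    //       sourceCommand: 'zfs',
    //       sourceArgs: ['send', 'tank/appdata@backup-2025-05-26'],
    //       preprocessType: PreprocessType.ZFS,
    //   });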
async getUnifiedJobStatus(jobId: string): Promise<UnifiedJobStatus | null> {
const streamingJob = this.streamingJobManager.getJobInfo(jobId);
if (streamingJob) {
return {
jobId: streamingJob.jobId,
type: 'streaming',
status: this.mapStreamingStatusToBackupStatus(streamingJob.status),
progress: streamingJob.progress,
error: streamingJob.error,
startTime: streamingJob.startTime,
preprocessType: streamingJob.type,
};
}
try {
const rcloneJob = await this.getEnhancedJobStatus(jobId);
if (rcloneJob) {
return {
jobId: rcloneJob.id,
type: 'daemon',
status: rcloneJob.status || BackupJobStatus.FAILED,
progress: rcloneJob.stats?.percent,
stats: rcloneJob.stats,
error: rcloneJob.error,
};
}
} catch (error) {
this.logger.warn(`Failed to get RClone job status for ${jobId}: ${error}`);
}
return null;
}
async getAllUnifiedJobs(): Promise<UnifiedJobStatus[]> {
const unifiedJobs: UnifiedJobStatus[] = [];
const streamingJobs = this.streamingJobManager.getAllActiveJobs();
for (const job of streamingJobs) {
unifiedJobs.push({
jobId: job.jobId,
type: 'streaming',
status: this.mapStreamingStatusToBackupStatus(job.status),
progress: job.progress,
error: job.error,
startTime: job.startTime,
preprocessType: job.type,
});
}
try {
const rcloneJobs = await this.getAllJobsWithStats();
for (const job of rcloneJobs) {
unifiedJobs.push({
jobId: job.id,
type: 'daemon',
status: job.status || BackupJobStatus.FAILED,
progress: job.stats?.percent,
stats: job.stats,
error: job.error,
});
}
} catch (error) {
this.logger.warn(`Failed to get RClone jobs: ${error}`);
}
return unifiedJobs;
}
async stopUnifiedJob(jobId: string): Promise<boolean> {
if (this.streamingJobManager.isJobRunning(jobId)) {
return this.streamingJobManager.cancelJob(jobId);
}
try {
const result = await this.stopJob(jobId);
return result.stopped.length > 0;
} catch (error) {
this.logger.warn(`Failed to stop RClone job ${jobId}: ${error}`);
return false;
}
}
private mapStreamingStatusToBackupStatus(
status: 'running' | 'completed' | 'failed' | 'cancelled'
): BackupJobStatus {
switch (status) {
case 'running':
return BackupJobStatus.RUNNING;
case 'completed':
return BackupJobStatus.COMPLETED;
case 'failed':
return BackupJobStatus.FAILED;
case 'cancelled':
return BackupJobStatus.CANCELLED;
default:
return BackupJobStatus.FAILED;
}
}
private extractBytesFromOutput(output?: string): number | undefined {
if (!output) return undefined;
const bytesMatch = output.match(/(\d+)\s*bytes/i);
if (bytesMatch) {
return parseInt(bytesMatch[1], 10);
}
const sizeMatch = output.match(/(\d+(?:\.\d+)?)\s*(KB|MB|GB|TB)/i);
if (sizeMatch) {
const value = parseFloat(sizeMatch[1]);
const unit = sizeMatch[2].toUpperCase();
switch (unit) {
case 'KB':
return Math.round(value * 1024);
case 'MB':
return Math.round(value * 1024 * 1024);
case 'GB':
return Math.round(value * 1024 * 1024 * 1024);
case 'TB':
return Math.round(value * 1024 * 1024 * 1024 * 1024);
default:
return undefined;
}
}
return undefined;
}
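    // For reference: extractBytesFromOutput('sent 2048 bytes') === 2048,
    // extractBytesFromOutput('transferred 1.5 MB') === 1572864 (1.5 * 1024^2),
    // and output matching neither pattern yields undefined.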
/**
* Gets enhanced job status with computed fields
*/
@@ -342,8 +580,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
try {
await validateObject(GetRCloneJobStatusDto, { jobId });
// If the jobId looks like a group name (starts with backup-), get group stats
if (jobId.startsWith(JOB_GROUP_PREFIX)) {
if (isBackupJobGroup(jobId)) {
try {
const stats = await this.callRcloneApi('core/stats', { group: jobId });
const enhancedStats = this.statusService.enhanceStatsWithFormattedFields({
@@ -352,10 +589,10 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
});
const job = this.statusService.transformStatsToJob(jobId, enhancedStats);
job.configId = configId || jobId.substring(JOB_GROUP_PREFIX.length);
job.configId = configId || getConfigIdFromGroupId(jobId);
// Add computed fields
job.isRunning = job.status === RCloneJobStatus.RUNNING;
job.isRunning = job.status === BackupJobStatus.RUNNING;
job.errorMessage = job.error || undefined;
return job;
@@ -373,7 +610,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
const job = this.statusService.transformStatsToJob(jobId, enhancedStats);
// Add computed fields
job.isRunning = job.status === RCloneJobStatus.RUNNING;
job.isRunning = job.status === BackupJobStatus.RUNNING;
job.errorMessage = job.error || undefined;
// Add configId if provided
@@ -448,7 +685,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
}
const backupGroups = (groupList.groups || []).filter((group: string) =>
group.startsWith(JOB_GROUP_PREFIX)
isBackupJobGroup(group)
);
if (backupGroups.length === 0) {
@@ -478,18 +715,15 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
this.logger.debug(`Processing group ${group}: stats=${JSON.stringify(groupStats)}`);
const configId = group.startsWith(JOB_GROUP_PREFIX)
? group.substring(JOB_GROUP_PREFIX.length)
: undefined;
const extractedConfigId = getConfigIdFromGroupId(group);
// Use the group name as the job ID for consistency, but add group info to stats
const enhancedStats = this.statusService.enhanceStatsWithFormattedFields({
...groupStats,
group, // Add group to stats so it gets picked up in transformStatsToJob
group,
});
const job = this.statusService.transformStatsToJob(group, enhancedStats);
job.configId = configId;
job.configId = extractedConfigId;
// Only include jobs that are truly active (not completed)
const isActivelyTransferring = groupStats.transferring?.length > 0;
@@ -527,8 +761,7 @@ export class RCloneApiService implements OnModuleInit, OnModuleDestroy {
async stopJob(jobId: string): Promise<JobOperationResult> {
this.logger.log(`Stopping job: ${jobId}`);
// Check if this is a group name (starts with backup-) or an individual job ID
if (jobId.startsWith(JOB_GROUP_PREFIX)) {
if (isBackupJobGroup(jobId)) {
// This is a group, use the stopgroup endpoint
return this.executeGroupOperation([jobId], 'stopgroup');
} else {

View File

@@ -3,7 +3,7 @@ import { Test, TestingModule } from '@nestjs/testing';
import { beforeEach, describe, expect, it, vi } from 'vitest';
import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js';
import { RCloneJobStats, RCloneJobStatus } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { BackupJobStatus, RCloneJobStats } from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';
// Mock NestJS Logger to suppress logs during tests
@@ -178,7 +178,7 @@ describe('RCloneStatusService', () => {
success: true,
error: undefined,
progressPercentage: 100,
status: RCloneJobStatus.COMPLETED,
status: BackupJobStatus.COMPLETED,
hasRecentJob: true,
});
});
@@ -203,7 +203,7 @@ describe('RCloneStatusService', () => {
success: true,
error: undefined,
progressPercentage: 60,
status: RCloneJobStatus.RUNNING,
status: BackupJobStatus.RUNNING,
hasRecentJob: true,
});
});
@@ -229,7 +229,7 @@ describe('RCloneStatusService', () => {
success: false,
error: 'Connection timeout',
progressPercentage: 0,
status: RCloneJobStatus.ERROR,
status: BackupJobStatus.FAILED,
hasRecentJob: true,
});
});
@@ -255,7 +255,7 @@ describe('RCloneStatusService', () => {
success: false,
error: 'context canceled',
progressPercentage: 0,
status: RCloneJobStatus.CANCELLED,
status: BackupJobStatus.CANCELLED,
hasRecentJob: true,
});
});

View File

@@ -1,10 +1,10 @@
import { Injectable, Logger } from '@nestjs/common';
import {
BackupJobStatus,
RCloneJob,
RCloneJobListResponse,
RCloneJobStats,
RCloneJobStatus,
RCloneJobWithStats,
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
import { FormatService } from '@app/unraid-api/utils/format.service.js';
@@ -104,7 +104,7 @@ export class RCloneStatusService {
this.logger.debug(`Stats for job ${jobId}: %o`, stats);
const group = stats.group || undefined;
this.logger.debug(`Processing job ${jobId}: group="${group}", stats: ${JSON.stringify(stats)}`);
this.logger.debug(`Processing job ${jobId}: group="${group}"`);
const isFinished =
stats.fatalError === false &&
@@ -115,18 +115,18 @@ export class RCloneStatusService {
const isCancelled = stats.lastError === 'context canceled';
// Determine status
let status: RCloneJobStatus;
let status: BackupJobStatus;
if (hasError) {
if (isCancelled) {
status = RCloneJobStatus.CANCELLED;
status = BackupJobStatus.CANCELLED;
} else {
status = RCloneJobStatus.ERROR;
status = BackupJobStatus.FAILED;
}
} else if (isFinished || stats.calculatedPercentage === 100) {
status = RCloneJobStatus.COMPLETED;
status = BackupJobStatus.COMPLETED;
} else {
status = RCloneJobStatus.RUNNING;
status = BackupJobStatus.RUNNING;
}
return {

View File

@@ -339,8 +339,8 @@ export class RCloneJob {
@Field(() => PrefixedID, { description: 'Configuration ID that triggered this job', nullable: true })
configId?: string;
@Field(() => RCloneJobStatus, { description: 'Current status of the job', nullable: true })
status?: RCloneJobStatus;
@Field(() => BackupJobStatus, { description: 'Current status of the job', nullable: true })
status?: BackupJobStatus;
@Field(() => Boolean, { description: 'Whether the job is finished', nullable: true })
finished?: boolean;
@@ -407,14 +407,14 @@ export interface RCloneJobsWithStatsResponse {
stats: RCloneJobStats[];
}
export enum RCloneJobStatus {
export enum BackupJobStatus {
RUNNING = 'Running',
COMPLETED = 'Completed',
ERROR = 'Error',
FAILED = 'Failed',
CANCELLED = 'Cancelled',
}
registerEnumType(RCloneJobStatus, {
name: 'RCloneJobStatus',
description: 'Status of an RClone job',
registerEnumType(BackupJobStatus, {
name: 'BackupJobStatus',
description: 'Status of a backup job',
});

View File

@@ -1,5 +1,6 @@
import { Module } from '@nestjs/common';
import { forwardRef, Module } from '@nestjs/common';
import { PreprocessingModule } from '@app/unraid-api/graph/resolvers/backup/preprocessing/preprocessing.module.js';
import { RCloneApiService } from '@app/unraid-api/graph/resolvers/rclone/rclone-api.service.js';
import { RCloneFormService } from '@app/unraid-api/graph/resolvers/rclone/rclone-form.service.js';
import { RCloneStatusService } from '@app/unraid-api/graph/resolvers/rclone/rclone-status.service.js';
@@ -9,7 +10,7 @@ import { RCloneService } from '@app/unraid-api/graph/resolvers/rclone/rclone.ser
import { UtilsModule } from '@app/unraid-api/utils/utils.module.js';
@Module({
imports: [UtilsModule],
imports: [UtilsModule, forwardRef(() => PreprocessingModule)],
providers: [
RCloneService,
RCloneApiService,

View File

@@ -13,6 +13,7 @@ import {
CreateRCloneRemoteInput,
DeleteRCloneRemoteInput,
RCloneRemote,
RCloneRemoteConfig,
} from '@app/unraid-api/graph/resolvers/rclone/rclone.model.js';
/**
@@ -37,7 +38,7 @@ export class RCloneMutationsResolver {
name: input.name,
type: input.type,
parameters: {},
config,
config: config as RCloneRemoteConfig,
};
} catch (error) {
this.logger.error(`Error creating remote: ${error}`);

View File

@@ -18,7 +18,8 @@ export async function bootstrapNestServer(): Promise<NestFastifyApplication> {
const app = await NestFactory.create<NestFastifyApplication>(AppModule, new FastifyAdapter(), {
bufferLogs: false,
...(LOG_LEVEL !== 'TRACE' ? { logger: false } : {}),
...(LOG_LEVEL !== 'DEBUG' ? { logger: false } : {}),
});
// Enable validation globally

View File

@@ -0,0 +1,60 @@
<script lang="ts" setup>
import { jsonFormsRenderers } from '@/forms/renderers';
import type {
JsonFormsCellRendererRegistryEntry,
JsonFormsI18nState,
JsonFormsRendererRegistryEntry,
JsonFormsUISchemaRegistryEntry,
JsonSchema,
Middleware,
UISchemaElement,
ValidationMode,
} from '@jsonforms/core';
import { JsonForms as BaseJsonForms } from '@jsonforms/vue';
import type { Ref } from 'vue';
const props = withDefaults(
defineProps<{
schema: JsonSchema;
uischema?: UISchemaElement;
data: Ref<Record<string, unknown>> | Record<string, unknown>;
renderers?: JsonFormsRendererRegistryEntry[];
cells?: JsonFormsCellRendererRegistryEntry[];
config?: unknown;
readonly?: boolean;
uischemas?: JsonFormsUISchemaRegistryEntry[];
validationMode?: ValidationMode;
middleware?: Middleware;
i18n?: JsonFormsI18nState;
}>(),
{
renderers: () => jsonFormsRenderers,
config: () => ({ restrict: false, trim: false, useDefaults: true }),
validationMode: 'ValidateAndShow',
}
);
const emit = defineEmits(['change']);
function onChange(event: unknown): void {
emit('change', event);
}
</script>
<template>
<BaseJsonForms
:schema="props.schema"
:uischema="props.uischema"
:data="props.data"
:renderers="props.renderers"
:cells="props.cells"
:config="props.config"
:readonly="props.readonly"
:uischemas="props.uischemas"
:validation-mode="props.validationMode"
:ajv="undefined"
:middleware="props.middleware"
:i18n="props.i18n"
:additional-errors="undefined"
@change="onChange"
/>
</template>

View File

@@ -43,7 +43,7 @@ const onSelectOpen = () => {
<template>
<!-- The ControlWrapper now handles the v-if based on control.visible -->
<Select
v-model="selected"
:model-value="selected"
:disabled="!control.enabled"
:required="control.required"
@update:model-value="onChange"

View File

@@ -65,8 +65,8 @@ const updateStep = (newStep: number) => {
if (newStep < 0 || newStep >= numSteps.value) {
return;
}
// Make total zero-indexed
const total = numSteps.value > 0 ? numSteps.value - 1 : 0;
// Total should be the actual number of steps, not zero-indexed
const total = numSteps.value;
// Update the 'configStep' property in the JSON Forms data with the new object structure
dispatch(Actions.update('configStep', () => ({ current: newStep, total })));
};
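// e.g. with 3 steps this dispatches { current: newStep, total: 3 } while
// `current` itself stays zero-indexed (0..2).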

View File

@@ -6,6 +6,7 @@ export * from '@/components';
// JsonForms
export * from '@/forms/renderers';
export { default as JsonForms } from '@/forms/JsonForms.vue';
// Lib
export * from '@/lib/utils';

View File

@@ -1,8 +1,8 @@
<script setup lang="ts">
import type { BackupJobsQuery } from '~/composables/gql/graphql';
import { RCloneJobStatus } from '~/composables/gql/graphql';
import { BackupJobStatus } from '~/composables/gql/graphql';
import { useFragment } from '~/composables/gql/fragment-masking';
import { BACKUP_STATS_FRAGMENT } from './backup-jobs.query';
import { BACKUP_STATS_FRAGMENT, RCLONE_JOB_FRAGMENT } from './backup-jobs.query';
import { computed } from 'vue';
interface Props {
@@ -11,7 +11,8 @@ interface Props {
const props = defineProps<Props>();
const stats = useFragment(BACKUP_STATS_FRAGMENT, props.job.stats);
const jobData = useFragment(RCLONE_JOB_FRAGMENT, props.job);
const stats = useFragment(BACKUP_STATS_FRAGMENT, jobData.stats);
// Calculate percentage if it's null but we have bytes and totalBytes
const calculatedPercentage = computed(() => {
@@ -26,23 +27,23 @@ const calculatedPercentage = computed(() => {
// Determine job status based on job properties
const jobStatus = computed(() => {
if (props.job.status) {
return props.job.status;
if (jobData.status) {
return jobData.status;
}
if (props.job.error) return RCloneJobStatus.ERROR;
if (props.job.finished && props.job.success) return RCloneJobStatus.COMPLETED;
if (props.job.finished && !props.job.success) return RCloneJobStatus.ERROR;
return RCloneJobStatus.RUNNING;
if (jobData.error) return BackupJobStatus.FAILED;
if (jobData.finished && jobData.success) return BackupJobStatus.COMPLETED;
if (jobData.finished && !jobData.success) return BackupJobStatus.FAILED;
return BackupJobStatus.RUNNING;
});
const statusColor = computed(() => {
switch (jobStatus.value) {
case RCloneJobStatus.ERROR:
case RCloneJobStatus.CANCELLED:
case BackupJobStatus.FAILED:
case BackupJobStatus.CANCELLED:
return 'red';
case RCloneJobStatus.COMPLETED:
case BackupJobStatus.COMPLETED:
return 'green';
case RCloneJobStatus.RUNNING:
case BackupJobStatus.RUNNING:
default:
return 'blue';
}
@@ -50,13 +51,13 @@ const statusColor = computed(() => {
const statusText = computed(() => {
switch (jobStatus.value) {
case RCloneJobStatus.ERROR:
case BackupJobStatus.FAILED:
return 'Error';
case RCloneJobStatus.CANCELLED:
case BackupJobStatus.CANCELLED:
return 'Cancelled';
case RCloneJobStatus.COMPLETED:
case BackupJobStatus.COMPLETED:
return 'Completed';
case RCloneJobStatus.RUNNING:
case BackupJobStatus.RUNNING:
default:
return 'Running';
}
@@ -72,7 +73,7 @@ const statusText = computed(() => {
:class="[
'w-3 h-3 rounded-full',
statusColor === 'green' ? 'bg-green-400' : statusColor === 'red' ? 'bg-red-400' : 'bg-blue-400',
jobStatus === RCloneJobStatus.RUNNING ? 'animate-pulse' : ''
jobStatus === BackupJobStatus.RUNNING ? 'animate-pulse' : ''
]"
></div>
</div>
@@ -81,11 +82,11 @@ const statusText = computed(() => {
Backup Job
</h3>
<div class="text-sm text-gray-500 dark:text-gray-400 space-y-1">
<p>Job ID: {{ job.id }}</p>
<p v-if="job.configId">Config ID: {{ job.configId }}</p>
<p v-if="job.group">Group: {{ job.group }}</p>
<p>Job ID: {{ jobData.id }}</p>
<p v-if="jobData.configId">Config ID: {{ jobData.configId }}</p>
<p v-if="jobData.group">Group: {{ jobData.group }}</p>
<p>Status: {{ statusText }}</p>
<p v-if="job.error" class="text-red-600 dark:text-red-400">Error: {{ job.error }}</p>
<p v-if="jobData.error" class="text-red-600 dark:text-red-400">Error: {{ jobData.error }}</p>
</div>
</div>
</div>

View File

@@ -1,5 +1,5 @@
<script setup lang="ts">
import { computed, ref } from 'vue';
import { ref, computed } from 'vue';
import { useQuery } from '@vue/apollo-composable';
import { Button, Sheet, SheetContent, SheetTitle, Spinner } from '@unraid/ui';
@@ -15,7 +15,7 @@ const { result, loading, error, refetch } = useQuery(
{},
{
fetchPolicy: 'cache-and-network',
pollInterval: 30000, // Much slower polling since we only need the list of configs
pollInterval: 50000, // Much slower polling since we only need the list of configs
}
);
@@ -23,6 +23,10 @@ const backupConfigIds = computed(() => {
return result.value?.backup?.configs?.map((config) => config.id) || [];
});
function handleJobDeleted() {
refetch();
}
function onConfigComplete() {
showConfigModal.value = false;
refetch();
@@ -76,7 +80,12 @@ function onConfigComplete() {
</div>
<div v-else class="space-y-4">
<BackupJobItem v-for="configId in backupConfigIds" :key="configId" :config-id="configId" />
<BackupJobItem
v-for="configId in backupConfigIds"
:key="configId"
:config-id="configId"
@deleted="handleJobDeleted"
/>
</div>
<Sheet v-model:open="showConfigModal">

View File

@@ -1,10 +1,174 @@
<script lang="ts" setup>
import { computed, provide, ref, watch } from 'vue';
import { useMutation, useQuery } from '@vue/apollo-composable';
import type { Ref } from 'vue';
import { Button, JsonForms } from '@unraid/ui';
import { CREATE_BACKUP_JOB_CONFIG_MUTATION, BACKUP_JOB_CONFIG_FORM_QUERY } from './backup-jobs.query';
import type { CreateBackupJobConfigInput, InputMaybe, PreprocessConfigInput, BackupMode } from '~/composables/gql/graphql';
// Define emit events
const emit = defineEmits<{
complete: []
cancel: []
}>()
// Define types for form state
interface ConfigStep {
current: number;
total: number;
}
// Form state
const formState: Ref<Record<string, unknown>> = ref({});
// Get form schema
const {
result: formResult,
loading: formLoading,
refetch: updateFormSchema,
} = useQuery(BACKUP_JOB_CONFIG_FORM_QUERY, {
input: {
showAdvanced: typeof formState.value?.showAdvanced === 'boolean' ? formState.value.showAdvanced : false,
},
});
// Watch for changes to showAdvanced and refetch schema
let refetchTimeout: NodeJS.Timeout | null = null;
watch(
formState,
async (newValue, oldValue) => {
        const newStepCurrent =
            typeof newValue?.configStep === 'object'
                ? (newValue.configStep as ConfigStep).current
                : (newValue?.configStep as number);
        const oldStepCurrent =
            typeof oldValue?.configStep === 'object'
                ? (oldValue.configStep as ConfigStep).current
                : (oldValue?.configStep as number);
        const newShowAdvanced =
            typeof newValue?.showAdvanced === 'boolean' ? newValue.showAdvanced : false;
        const oldShowAdvanced =
            typeof oldValue?.showAdvanced === 'boolean' ? oldValue.showAdvanced : false;
const shouldRefetch = newShowAdvanced !== oldShowAdvanced || newStepCurrent !== oldStepCurrent;
if (shouldRefetch) {
if (newShowAdvanced !== oldShowAdvanced) {
console.log('[BackupJobConfigForm] showAdvanced changed:', newShowAdvanced);
}
if (newStepCurrent !== oldStepCurrent) {
console.log('[BackupJobConfigForm] configStep.current changed:', newStepCurrent, 'from:', oldStepCurrent, 'Refetching schema.');
}
if (refetchTimeout) {
clearTimeout(refetchTimeout);
}
refetchTimeout = setTimeout(async () => {
await updateFormSchema({
input: {
showAdvanced: newShowAdvanced,
},
});
refetchTimeout = null;
}, 100);
}
},
{ deep: true }
);
/**
* Form submission and mutation handling
*/
const {
mutate: createBackupJobConfig,
loading: isCreating,
error: createError,
onDone: onCreateDone,
} = useMutation(CREATE_BACKUP_JOB_CONFIG_MUTATION);
// Handle form submission
const submitForm = async () => {
try {
const value = formState.value as Record<string, unknown>;
        console.log('[BackupJobConfigForm] submitForm', value);
const input: CreateBackupJobConfigInput = {
name: value?.name as string,
destinationPath: value?.destinationPath as string,
schedule: (value?.schedule as string) || '',
enabled: value?.enabled as boolean,
remoteName: value?.remoteName as string,
sourcePath: (value?.sourcePath as string) || '',
rcloneOptions: value?.rcloneOptions as Record<string, unknown>,
preprocessConfig: value?.preprocessConfig as InputMaybe<PreprocessConfigInput> | undefined,
backupMode: (value?.backupMode as BackupMode) || 'RAW' as BackupMode,
};
await createBackupJobConfig({
input,
});
} catch (error) {
console.error('Error creating backup job config:', error);
}
};
// Handle successful creation
onCreateDone(async ({ data }) => {
if (window.toast) {
window.toast.success('Backup Job Created', {
description: `Successfully created backup job "${formState.value?.name as string}"`,
});
}
console.log('[BackupJobConfigForm] onCreateDone', data);
formState.value = {};
emit('complete');
});
const parsedOriginalErrorMessage = computed(() => {
const originalError = createError.value?.graphQLErrors?.[0]?.extensions?.originalError;
if (originalError && typeof originalError === 'object' && originalError !== null && 'message' in originalError) {
return (originalError as { message: string | string[] }).message;
}
return undefined;
});
const onChange = ({ data }: { data: unknown }) => {
console.log('[BackupJobConfigForm] onChange', data);
formState.value = data as Record<string, unknown>;
};
// --- Submit Button Logic ---
const uiSchema = computed(() => formResult.value?.backupJobConfigForm?.uiSchema);
// Handle both number and object formats of configStep
const getCurrentStep = computed(() => {
const step = formState.value?.configStep;
return typeof step === 'object' ? (step as ConfigStep).current : step as number;
});
// Get total steps from UI schema
const numSteps = computed(() => {
if (uiSchema.value?.type === 'SteppedLayout') {
return uiSchema.value?.options?.steps?.length ?? 0;
} else if (uiSchema.value?.elements?.[0]?.type === 'SteppedLayout') {
return uiSchema.value?.elements[0].options?.steps?.length ?? 0;
}
return 0;
});
const isLastStep = computed(() => {
if (numSteps.value === 0) return false;
return getCurrentStep.value === numSteps.value - 1;
});
// --- Provide submission logic to SteppedLayout ---
provide('submitForm', submitForm);
provide('isSubmitting', isCreating);
</script>
<template>
<div class="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 shadow-sm">
<div class="p-6">
<h2 class="text-xl font-medium mb-4 text-gray-900 dark:text-white">Configure Backup Job</h2>
<div v-if="createError" class="mb-4 p-4 bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 text-red-700 dark:text-red-300 rounded-md">
{{ createError.message }}
<p>{{ createError.message }}</p>
<ul v-if="Array.isArray(parsedOriginalErrorMessage)" class="list-disc list-inside mt-2">
<li v-for="(msg, index) in parsedOriginalErrorMessage" :key="index">{{ msg }}</li>
</ul>
<p v-else-if="typeof parsedOriginalErrorMessage === 'string' && parsedOriginalErrorMessage.length > 0" class="mt-2">
{{ parsedOriginalErrorMessage }}
</p>
</div>
<div v-if="formLoading" class="py-8 text-center text-gray-500 dark:text-gray-400">Loading configuration form...</div>
@@ -15,9 +179,7 @@
v-if="formResult?.backupJobConfigForm"
:schema="formResult.backupJobConfigForm.dataSchema"
:uischema="formResult.backupJobConfigForm.uiSchema"
:renderers="renderers"
:data="formState"
:config="jsonFormsConfig"
:readonly="isCreating"
@change="onChange"
/>
@@ -52,187 +214,6 @@
</div>
</template>
<script lang="ts" setup>
import { computed, provide, ref, watch } from 'vue';
import { useMutation, useQuery } from '@vue/apollo-composable';
import { Button, jsonFormsRenderers } from '@unraid/ui';
import { JsonForms } from '@jsonforms/vue';
import { CREATE_BACKUP_JOB_CONFIG_MUTATION, BACKUP_JOB_CONFIG_FORM_QUERY } from './backup-jobs.query';
// Define emit events
const emit = defineEmits<{
complete: []
cancel: []
}>()
// Define types for form state
interface ConfigStep {
current: number;
total: number;
}
// Form state
const formState = ref({
configStep: 0 as number | ConfigStep,
showAdvanced: false,
name: '',
sourcePath: '',
remoteName: '',
destinationPath: '',
schedule: '0 2 * * *',
enabled: true,
rcloneOptions: {},
});
// Get form schema
const {
result: formResult,
loading: formLoading,
refetch: updateFormSchema,
} = useQuery(BACKUP_JOB_CONFIG_FORM_QUERY, {
input: {
showAdvanced: formState.value.showAdvanced || false,
},
});
// Watch for changes to showAdvanced and refetch schema
let refetchTimeout: NodeJS.Timeout | null = null;
watch(
formState,
async (newValue, oldValue) => {
if (newValue.showAdvanced !== oldValue.showAdvanced) {
console.log('[BackupJobConfigForm] showAdvanced changed:', newValue.showAdvanced);
// Debounce refetch to prevent multiple rapid calls
if (refetchTimeout) {
clearTimeout(refetchTimeout);
}
refetchTimeout = setTimeout(async () => {
await updateFormSchema({
input: {
showAdvanced: newValue.showAdvanced,
},
});
refetchTimeout = null;
}, 100);
}
},
{ deep: true }
);
/**
* Form submission and mutation handling
*/
const {
mutate: createBackupJobConfig,
loading: isCreating,
error: createError,
onDone: onCreateDone,
} = useMutation(CREATE_BACKUP_JOB_CONFIG_MUTATION);
// Handle form submission
const submitForm = async () => {
try {
await createBackupJobConfig({
input: {
name: formState.value.name,
sourcePath: formState.value.sourcePath,
remoteName: formState.value.remoteName,
destinationPath: formState.value.destinationPath,
schedule: formState.value.schedule,
enabled: formState.value.enabled,
rcloneOptions: formState.value.rcloneOptions,
},
});
} catch (error) {
console.error('Error creating backup job config:', error);
}
};
// Handle successful creation
onCreateDone(async ({ data }) => {
// Show success message
if (window.toast) {
window.toast.success('Backup Job Created', {
description: `Successfully created backup job "${formState.value.name}"`,
});
}
console.log('[BackupJobConfigForm] onCreateDone', data);
// Reset form and emit complete event
formState.value = {
configStep: 0,
showAdvanced: false,
name: '',
sourcePath: '',
remoteName: '',
destinationPath: '',
schedule: '0 2 * * *',
enabled: true,
rcloneOptions: {},
};
emit('complete');
});
// Set up JSONForms config
const jsonFormsConfig = {
restrict: false,
trim: false,
};
const renderers = [...jsonFormsRenderers];
// Handle form data changes with debouncing to reduce excessive logging
let changeTimeout: NodeJS.Timeout | null = null;
const onChange = ({ data }: { data: Record<string, unknown> }) => {
// Clear any pending timeout
if (changeTimeout) {
clearTimeout(changeTimeout);
}
// Log changes but debounce to reduce console spam
changeTimeout = setTimeout(() => {
console.log('[BackupJobConfigForm] onChange received data:', JSON.stringify(data));
changeTimeout = null;
}, 300);
// Update formState
formState.value = data as typeof formState.value;
};
// --- Submit Button Logic ---
const uiSchema = computed(() => formResult.value?.backupJobConfigForm?.uiSchema);
// Handle both number and object formats of configStep
const getCurrentStep = computed(() => {
const step = formState.value.configStep;
return typeof step === 'object' ? (step as ConfigStep).current : step as number;
});
// Get total steps from UI schema
const numSteps = computed(() => {
if (uiSchema.value?.type === 'SteppedLayout') {
return uiSchema.value?.options?.steps?.length ?? 0;
} else if (uiSchema.value?.elements?.[0]?.type === 'SteppedLayout') {
return uiSchema.value?.elements[0].options?.steps?.length ?? 0;
}
return 0;
});
const isLastStep = computed(() => {
if (numSteps.value === 0) return false;
return getCurrentStep.value === numSteps.value - 1;
});
// --- Provide submission logic to SteppedLayout ---
provide('submitForm', submitForm);
provide('isSubmitting', isCreating);
</script>
<style lang="postcss">
/* Import unraid-ui globals first */

View File

@@ -1,19 +1,24 @@
<script setup lang="ts">
import { computed, ref, watch } from 'vue';
import { useQuery, useMutation } from '@vue/apollo-composable';
import { PlayIcon, StopIcon } from '@heroicons/vue/24/solid';
import { useMutation, useQuery } from '@vue/apollo-composable';
import { PlayIcon, StopIcon, TrashIcon } from '@heroicons/vue/24/solid';
import { Badge, Button, Switch } from '@unraid/ui';
import {
import {
BACKUP_JOB_CONFIG_FRAGMENT,
BACKUP_JOB_CONFIG_QUERY,
BACKUP_JOB_CONFIG_WITH_CURRENT_JOB_FRAGMENT,
BACKUP_JOB_CONFIG_FRAGMENT,
RCLONE_JOB_FRAGMENT,
BACKUP_STATS_FRAGMENT,
PREPROCESS_CONFIG_FRAGMENT,
RCLONE_JOB_FRAGMENT,
STOP_BACKUP_JOB_MUTATION,
TOGGLE_BACKUP_JOB_CONFIG_MUTATION,
TRIGGER_BACKUP_JOB_MUTATION,
STOP_BACKUP_JOB_MUTATION
DELETE_BACKUP_JOB_CONFIG_MUTATION,
} from '~/components/Backup/backup-jobs.query';
import { useFragment } from '~/composables/gql/fragment-masking';
import { BackupJobStatus } from '~/composables/gql/graphql';
interface Props {
configId: string;
@@ -26,59 +31,81 @@ if (!props.configId) {
console.warn('BackupJobItem: configId prop is required but not provided');
}
const emit = defineEmits(['deleted']);
const isToggling = ref(false);
const isTriggering = ref(false);
const showDeleteConfirm = ref(false);
// Add reactive variables for the query
const queryVariables = computed(() => ({ id: props.configId }));
const { result, loading, error, refetch } = useQuery(
BACKUP_JOB_CONFIG_QUERY,
queryVariables,
{
fetchPolicy: 'cache-and-network',
pollInterval: 5000,
errorPolicy: 'all', // Show partial data even if there are errors
}
);
const { result, loading, error, refetch } = useQuery(BACKUP_JOB_CONFIG_QUERY, queryVariables, {
fetchPolicy: 'cache-and-network',
pollInterval: 50000,
errorPolicy: 'all', // Show partial data even if there are errors
});
// Add debugging to see what's happening
watch([result, error, loading], ([newResult, newError, newLoading]) => {
console.log('BackupJobItem query state:', {
configId: props.configId,
loading: newLoading,
error: newError,
result: newResult,
backupJobConfig: newResult?.backupJobConfig
});
}, { immediate: true });
watch(
[result, error, loading],
([newResult, newError, newLoading]) => {
console.log('BackupJobItem query state:', {
configId: props.configId,
loading: newLoading,
error: newError,
result: newResult,
backupJobConfig: newResult?.backupJobConfig,
});
},
{ immediate: true }
);
// Watch for configId changes and refetch
watch(() => props.configId, (newConfigId) => {
if (newConfigId) {
console.log('ConfigId changed, refetching:', newConfigId);
refetch();
watch(
() => props.configId,
(newConfigId) => {
if (newConfigId) {
console.log('ConfigId changed, refetching:', newConfigId);
refetch();
// Reset delete confirmation when configId changes
showDeleteConfirm.value = false;
}
}
});
);
const { mutate: toggleJobConfig } = useMutation(TOGGLE_BACKUP_JOB_CONFIG_MUTATION);
const { mutate: triggerJob } = useMutation(TRIGGER_BACKUP_JOB_MUTATION);
const { mutate: stopJob } = useMutation(STOP_BACKUP_JOB_MUTATION);
const { mutate: deleteJobConfig, loading: isDeletingJob } = useMutation(
DELETE_BACKUP_JOB_CONFIG_MUTATION
);
const configWithJob = computed(() => {
if (!result.value?.backupJobConfig) {
console.log('No backupJobConfig in result:', result.value);
return null;
}
try {
const config = useFragment(BACKUP_JOB_CONFIG_WITH_CURRENT_JOB_FRAGMENT, result.value.backupJobConfig);
const config = useFragment(
BACKUP_JOB_CONFIG_WITH_CURRENT_JOB_FRAGMENT,
result.value.backupJobConfig
);
const baseConfig = useFragment(BACKUP_JOB_CONFIG_FRAGMENT, config);
const currentJob = config.currentJob ? useFragment(RCLONE_JOB_FRAGMENT, config.currentJob) : undefined;
const jobStats = currentJob?.stats ? useFragment(BACKUP_STATS_FRAGMENT, currentJob.stats) : undefined;
const currentJob = config.currentJob
? useFragment(RCLONE_JOB_FRAGMENT, config.currentJob)
: undefined;
const jobStats = currentJob?.stats
? useFragment(BACKUP_STATS_FRAGMENT, currentJob.stats)
: undefined;
const preprocessConfig = baseConfig.preprocessConfig
? useFragment(PREPROCESS_CONFIG_FRAGMENT, baseConfig.preprocessConfig)
: undefined;
return {
...baseConfig,
preprocessConfig,
runningJob: currentJob,
jobStats,
errorMessage: currentJob?.error || undefined,
@@ -97,7 +124,7 @@ function formatDate(dateString: string): string {
async function handleToggleJob() {
if (!configWithJob.value || isToggling.value) return;
isToggling.value = true;
try {
await toggleJobConfig({ id: configWithJob.value.id });
@@ -110,11 +137,11 @@ async function handleToggleJob() {
async function handleTriggerOrStopJob() {
if (!configWithJob.value || isTriggering.value) return;
isTriggering.value = true;
try {
if (configWithJob.value.isRunning && configWithJob.value.runningJob?.id) {
const result = await stopJob({ jobId: configWithJob.value.runningJob.id });
const result = await stopJob({ id: configWithJob.value.runningJob.id });
if (result?.data?.backup?.stopBackupJob?.status) {
console.log('Backup job stopped:', result.data.backup.stopBackupJob);
}
@@ -130,10 +157,46 @@ async function handleTriggerOrStopJob() {
isTriggering.value = false;
}
}
async function handleDeleteJob() {
if (!configWithJob.value || isDeletingJob.value) return;
try {
const result = await deleteJobConfig({ id: configWithJob.value.id });
if (result?.data?.backup?.deleteBackupJobConfig) {
console.log('Backup job config deleted:', configWithJob.value.id);
emit('deleted', configWithJob.value.id);
showDeleteConfirm.value = false; // Close confirmation on success
} else {
console.error('Failed to delete backup job config, no confirmation in result:', result);
// Optionally, show an error message to the user here
}
} catch (error) {
console.error('Error deleting backup job config:', error);
// Optionally, show an error message to the user here
}
}
function getPreprocessingTypeLabel(type: string): string {
switch (type) {
case 'ZFS':
return 'ZFS Snapshot';
case 'FLASH':
return 'Flash Backup';
case 'SCRIPT':
return 'Custom Script';
case 'NONE':
default:
return 'None';
}
}
</script>
<template>
<div v-if="loading" class="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6 shadow-sm animate-pulse">
<div
v-if="loading"
class="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6 shadow-sm animate-pulse"
>
<div class="flex items-center justify-between mb-4">
<div class="flex items-center space-x-3">
<div class="w-3 h-3 bg-gray-300 rounded-full"></div>
@@ -149,32 +212,68 @@ async function handleTriggerOrStopJob() {
</div>
</div>
<div v-else-if="error" class="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
<div
v-else-if="error"
class="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4"
>
<p class="text-red-700 dark:text-red-300">Error loading backup job: {{ error.message }}</p>
</div>
<div v-else-if="!loading && !configWithJob" class="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4">
<div
v-else-if="!loading && !configWithJob"
class="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4"
>
<p class="text-yellow-700 dark:text-yellow-300">
Backup job configuration not found (ID: {{ configId }})
</p>
<button
class="mt-2 text-sm text-yellow-600 dark:text-yellow-400 hover:text-yellow-800 dark:hover:text-yellow-200 underline"
<button
class="mt-2 text-sm text-yellow-600 dark:text-yellow-400 hover:text-yellow-800 dark:hover:text-yellow-200 underline"
@click="refetch()"
>
Retry loading
</button>
</div>
<div v-else-if="configWithJob" class="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6 shadow-sm">
<div
v-else-if="configWithJob"
class="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6 shadow-sm relative"
>
<!-- Delete Confirmation Dialog -->
<div
v-if="showDeleteConfirm"
class="absolute inset-0 z-10 bg-white/80 dark:bg-gray-800/80 flex flex-col items-center justify-center p-6 rounded-lg"
>
<p class="text-lg font-medium text-gray-900 dark:text-white mb-4 text-center">
Are you sure you want to delete this backup job?
</p>
<p class="text-sm text-gray-600 dark:text-gray-400 mb-6 text-center">
This action cannot be undone.
</p>
<div class="flex space-x-3">
<Button variant="outline" :disabled="isDeletingJob" @click="showDeleteConfirm = false">
Cancel
</Button>
<Button variant="destructive" :disabled="isDeletingJob" @click="handleDeleteJob">
<span
v-if="isDeletingJob"
class="w-3 h-3 border border-white border-t-transparent rounded-full animate-spin mr-1"
></span>
{{ isDeletingJob ? 'Deleting...' : 'Delete' }}
</Button>
</div>
</div>
<div class="flex items-center justify-between mb-4">
<div class="flex items-center space-x-3">
<div class="flex-shrink-0">
<div
:class="[
'w-3 h-3 rounded-full',
configWithJob.runningJob?.status === 'COMPLETED'
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
? 'bg-green-400'
: configWithJob.errorMessage || configWithJob.runningJob?.status === 'CANCELLED' || configWithJob.runningJob?.status === 'ERROR'
: configWithJob.errorMessage ||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
? 'bg-red-400'
: configWithJob.isRunning
? 'bg-blue-400 animate-pulse'
@@ -187,14 +286,11 @@ async function handleTriggerOrStopJob() {
<div>
<h3 class="text-lg font-medium text-gray-900 dark:text-white">
{{ configWithJob.name }}
<span
v-if="configWithJob.isRunning"
class="text-sm text-blue-600 dark:text-blue-400 ml-2"
>
<span v-if="configWithJob.isRunning" class="text-sm text-blue-600 dark:text-blue-400 ml-2">
(Running)
</span>
<span
v-else-if="configWithJob.runningJob?.status === 'COMPLETED'"
v-else-if="configWithJob.runningJob?.status === BackupJobStatus.COMPLETED"
class="text-sm text-green-600 dark:text-green-400 ml-2"
>
(Completed)
@@ -210,11 +306,14 @@ async function handleTriggerOrStopJob() {
            {{ configWithJob.sourcePath }} → {{ configWithJob.remoteName }}:{{
configWithJob.destinationPath
}}
<span
v-if="configWithJob.preprocessConfig && configWithJob.preprocessConfig.type !== 'NONE'"
class="ml-2 inline-flex items-center px-2 py-0.5 rounded text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900/20 dark:text-blue-400"
>
{{ getPreprocessingTypeLabel(configWithJob.preprocessConfig.type) }}
</span>
</p>
<p
v-if="configWithJob.errorMessage"
class="text-sm text-red-600 dark:text-red-400 mt-1"
>
<p v-if="configWithJob.errorMessage" class="text-sm text-red-600 dark:text-red-400 mt-1">
Error: {{ configWithJob.errorMessage }}
</p>
</div>
@@ -226,13 +325,13 @@ async function handleTriggerOrStopJob() {
</span>
<Switch
:checked="configWithJob.enabled"
:disabled="isToggling || configWithJob.isRunning"
:disabled="isToggling || configWithJob.isRunning || showDeleteConfirm"
@update:checked="handleToggleJob"
/>
</div>
<Button
:disabled="isTriggering"
:disabled="isTriggering || showDeleteConfirm"
:variant="!isTriggering ? 'primary' : 'outline'"
size="sm"
@click="handleTriggerOrStopJob"
@@ -254,11 +353,22 @@ async function handleTriggerOrStopJob() {
}}
</Button>
<Button
variant="destructive"
size="sm"
:disabled="isDeletingJob || configWithJob.isRunning || showDeleteConfirm"
@click="showDeleteConfirm = true"
>
<TrashIcon class="w-4 h-4" />
</Button>
<Badge
:variant="
configWithJob.runningJob?.status === 'COMPLETED'
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
? 'green'
: configWithJob.errorMessage || configWithJob.runningJob?.status === 'CANCELLED' || configWithJob.runningJob?.status === 'ERROR'
: configWithJob.errorMessage ||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
? 'red'
: configWithJob.isRunning
? 'blue'
@@ -270,7 +380,8 @@ async function handleTriggerOrStopJob() {
>
{{
configWithJob.hasRecentJob && configWithJob.runningJob?.status
? configWithJob.runningJob.status.charAt(0).toUpperCase() + configWithJob.runningJob.status.slice(1).toLowerCase()
? configWithJob.runningJob.status.charAt(0).toUpperCase() +
configWithJob.runningJob.status.slice(1).toLowerCase()
: configWithJob.enabled
? 'Active'
: 'Inactive'
@@ -284,9 +395,11 @@ async function handleTriggerOrStopJob() {
v-if="configWithJob.hasRecentJob && configWithJob.jobStats"
:class="[
'mb-4 border rounded-lg p-4',
configWithJob.runningJob?.status === 'COMPLETED'
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
? 'bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800'
: configWithJob.errorMessage || configWithJob.runningJob?.status === 'CANCELLED' || configWithJob.runningJob?.status === 'ERROR'
: configWithJob.errorMessage ||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
? 'bg-red-50 dark:bg-red-900/20 border-red-200 dark:border-red-800'
: 'bg-blue-50 dark:bg-blue-900/20 border-blue-200 dark:border-blue-800',
]"
@@ -294,9 +407,11 @@ async function handleTriggerOrStopJob() {
<div
:class="[
'flex justify-between text-sm mb-3',
configWithJob.runningJob?.status === 'COMPLETED'
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
? 'text-green-700 dark:text-green-300'
: configWithJob.errorMessage || configWithJob.runningJob?.status === 'CANCELLED' || configWithJob.runningJob?.status === 'ERROR'
: configWithJob.errorMessage ||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
? 'text-red-700 dark:text-red-300'
: 'text-blue-700 dark:text-blue-300',
]"
@@ -307,9 +422,11 @@ async function handleTriggerOrStopJob() {
<div
:class="[
'w-full rounded-full h-2 mb-3',
configWithJob.runningJob?.status === 'COMPLETED'
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
? 'bg-green-200 dark:bg-green-700'
: configWithJob.errorMessage || configWithJob.runningJob?.status === 'CANCELLED' || configWithJob.runningJob?.status === 'ERROR'
: configWithJob.errorMessage ||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
? 'bg-red-200 dark:bg-red-700'
: 'bg-blue-200 dark:bg-blue-700',
]"
@@ -317,9 +434,11 @@ async function handleTriggerOrStopJob() {
<div
:class="[
'h-2 rounded-full transition-all duration-300',
configWithJob.runningJob?.status === 'COMPLETED'
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
? 'bg-green-600'
: configWithJob.errorMessage || configWithJob.runningJob?.status === 'CANCELLED' || configWithJob.runningJob?.status === 'ERROR'
: configWithJob.errorMessage ||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
? 'bg-red-600'
: 'bg-blue-600',
]"
@@ -329,9 +448,11 @@ async function handleTriggerOrStopJob() {
<div
:class="[
'grid grid-cols-2 md:grid-cols-4 gap-4 text-sm',
configWithJob.runningJob?.status === 'COMPLETED'
configWithJob.runningJob?.status === BackupJobStatus.COMPLETED
? 'text-green-700 dark:text-green-300'
: configWithJob.errorMessage || configWithJob.runningJob?.status === 'CANCELLED' || configWithJob.runningJob?.status === 'ERROR'
: configWithJob.errorMessage ||
configWithJob.runningJob?.status === BackupJobStatus.CANCELLED ||
configWithJob.runningJob?.status === BackupJobStatus.FAILED
? 'text-red-700 dark:text-red-300'
: 'text-blue-700 dark:text-blue-300',
]"
@@ -339,20 +460,27 @@ async function handleTriggerOrStopJob() {
<div>
<span class="font-medium">Transferred:</span> {{ configWithJob.jobStats.formattedBytes }}
</div>
<div><span class="font-medium">Elapsed:</span> {{ configWithJob.jobStats.formattedElapsedTime }}</div>
<div v-if="configWithJob.runningJob?.status === 'RUNNING'">
<div>
<span class="font-medium">Elapsed:</span> {{ configWithJob.jobStats.formattedElapsedTime }}
</div>
<div v-if="configWithJob.runningJob?.status === BackupJobStatus.RUNNING">
<span class="font-medium">ETA:</span> {{ configWithJob.jobStats.formattedEta }}
</div>
<div v-else>
<span class="font-medium">Status:</span>
{{ configWithJob.runningJob?.status ? configWithJob.runningJob.status.charAt(0).toUpperCase() + configWithJob.runningJob.status.slice(1).toLowerCase() : 'Unknown' }}
{{
configWithJob.runningJob?.status
? configWithJob.runningJob.status.charAt(0).toUpperCase() +
configWithJob.runningJob.status.slice(1).toLowerCase()
: 'Unknown'
}}
</div>
<div><span class="font-medium">Files:</span> {{ configWithJob.jobStats.transfers }}</div>
</div>
</div>
<!-- Schedule and status information -->
<div class="grid grid-cols-1 md:grid-cols-3 gap-4">
<div class="grid grid-cols-1 md:grid-cols-4 gap-4">
<div class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
<dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Schedule</dt>
<dd class="mt-1 text-sm text-gray-900 dark:text-white">
@@ -360,6 +488,41 @@ async function handleTriggerOrStopJob() {
</dd>
</div>
<div class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
<dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Preprocessing</dt>
<dd class="mt-1 text-sm text-gray-900 dark:text-white">
{{ getPreprocessingTypeLabel(configWithJob.preprocessConfig?.type || 'NONE') }}
<span
v-if="
configWithJob.preprocessConfig?.type === 'ZFS' && configWithJob.preprocessConfig.zfsConfig
"
class="block text-xs text-gray-500 dark:text-gray-400 mt-1"
>
{{ configWithJob.preprocessConfig.zfsConfig.poolName }}/{{
configWithJob.preprocessConfig.zfsConfig.datasetName
}}
</span>
<span
v-else-if="
configWithJob.preprocessConfig?.type === 'FLASH' &&
configWithJob.preprocessConfig.flashConfig
"
class="block text-xs text-gray-500 dark:text-gray-400 mt-1"
>
{{ configWithJob.preprocessConfig.flashConfig.flashPath }}
</span>
<span
v-else-if="
configWithJob.preprocessConfig?.type === 'SCRIPT' &&
configWithJob.preprocessConfig.scriptConfig
"
class="block text-xs text-gray-500 dark:text-gray-400 mt-1"
>
{{ configWithJob.preprocessConfig.scriptConfig.scriptPath }}
</span>
</dd>
</div>
<div class="bg-gray-50 dark:bg-gray-700 rounded-lg p-3">
<dt class="text-sm font-medium text-gray-500 dark:text-gray-400">Last Run</dt>
<dd class="mt-1 text-sm text-gray-900 dark:text-white">
@@ -375,4 +538,4 @@ async function handleTriggerOrStopJob() {
</div>
</div>
</div>
</template>
</template>

View File

@@ -50,15 +50,47 @@ export const RCLONE_JOB_FRAGMENT = graphql(/* GraphQL */ `
}
`);
+export const PREPROCESS_CONFIG_FRAGMENT = graphql(/* GraphQL */ `
+  fragment PreprocessConfig on PreprocessConfig {
+    type
+    timeout
+    cleanupOnFailure
+    zfsConfig {
+      poolName
+      datasetName
+      snapshotPrefix
+      cleanupSnapshots
+      retainSnapshots
+    }
+    flashConfig {
+      flashPath
+      includeGitHistory
+      additionalPaths
+    }
+    scriptConfig {
+      scriptPath
+      scriptArgs
+      workingDirectory
+      environment
+      outputPath
+    }
+  }
+`);
export const BACKUP_JOB_CONFIG_FRAGMENT = graphql(/* GraphQL */ `
fragment BackupJobConfig on BackupJobConfig {
id
name
+    backupMode
sourcePath
remoteName
destinationPath
schedule
enabled
+    rcloneOptions
+    preprocessConfig {
+      ...PreprocessConfig
+    }
createdAt
updatedAt
lastRunAt
@@ -178,7 +210,6 @@ export const TRIGGER_BACKUP_JOB_MUTATION = graphql(/* GraphQL */ `
mutation TriggerBackupJob($id: PrefixedID!) {
backup {
triggerJob(id: $id) {
-        status
jobId
}
}
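
For reference, the PreprocessConfig fragment above implies roughly the following TypeScript shape; this is a sketch with assumed nullability, not the codegen output:

// Approximate shape implied by the PreprocessConfig fragment.
// Optionality is assumed; consult the generated types for the real schema.
interface PreprocessConfig {
    type: 'NONE' | 'ZFS' | 'FLASH' | 'SCRIPT';
    timeout?: number;
    cleanupOnFailure?: boolean;
    zfsConfig?: {
        poolName: string;
        datasetName: string;
        snapshotPrefix?: string;
        cleanupSnapshots?: boolean;
        retainSnapshots?: number;
    } | null;
    flashConfig?: {
        flashPath: string;
        includeGitHistory?: boolean;
        additionalPaths?: string[];
    } | null;
    scriptConfig?: {
        scriptPath: string;
        scriptArgs?: string[];
        workingDirectory?: string;
        environment?: Record<string, string>;
        outputPath?: string;
    } | null;
}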

View File

@@ -22,7 +22,8 @@ type Documents = {
"\n query ApiKeyMeta {\n apiKeyPossibleRoles\n apiKeyPossiblePermissions {\n resource\n actions\n }\n }\n": typeof types.ApiKeyMetaDocument,
"\n fragment BackupStats on RCloneJobStats {\n bytes\n speed\n eta\n elapsedTime\n percentage\n checks\n deletes\n errors\n fatalError\n lastError\n renames\n retryError\n serverSideCopies\n serverSideCopyBytes\n serverSideMoves\n serverSideMoveBytes\n totalBytes\n totalChecks\n totalTransfers\n transferTime\n transfers\n transferring\n checking\n formattedBytes\n formattedSpeed\n formattedElapsedTime\n formattedEta\n calculatedPercentage\n isActivelyRunning\n isCompleted\n }\n": typeof types.BackupStatsFragmentDoc,
"\n fragment RCloneJob on RCloneJob {\n id\n group\n configId\n finished\n success\n error\n status\n stats {\n ...BackupStats\n }\n }\n": typeof types.RCloneJobFragmentDoc,
"\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n": typeof types.BackupJobConfigFragmentDoc,
"\n fragment PreprocessConfig on PreprocessConfig {\n type\n timeout\n cleanupOnFailure\n zfsConfig {\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n flashConfig {\n flashPath\n includeGitHistory\n additionalPaths\n }\n scriptConfig {\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n }\n": typeof types.PreprocessConfigFragmentDoc,
"\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n backupMode\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n rcloneOptions\n preprocessConfig {\n ...PreprocessConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n": typeof types.BackupJobConfigFragmentDoc,
"\n fragment BackupJobConfigWithCurrentJob on BackupJobConfig {\n ...BackupJobConfig\n currentJob {\n ...RCloneJob\n }\n }\n": typeof types.BackupJobConfigWithCurrentJobFragmentDoc,
"\n query BackupJobs {\n backup {\n id\n jobs {\n ...RCloneJob\n }\n }\n }\n": typeof types.BackupJobsDocument,
"\n query BackupJob($id: PrefixedID!) {\n backupJob(id: $id) {\n ...RCloneJob\n }\n }\n": typeof types.BackupJobDocument,
@@ -34,7 +35,7 @@ type Documents = {
"\n mutation UpdateBackupJobConfig($id: PrefixedID!, $input: UpdateBackupJobConfigInput!) {\n backup {\n updateBackupJobConfig(id: $id, input: $input) {\n ...BackupJobConfig\n }\n }\n }\n": typeof types.UpdateBackupJobConfigDocument,
"\n mutation DeleteBackupJobConfig($id: PrefixedID!) {\n backup {\n deleteBackupJobConfig(id: $id)\n }\n }\n": typeof types.DeleteBackupJobConfigDocument,
"\n mutation ToggleBackupJobConfig($id: PrefixedID!) {\n backup {\n toggleJobConfig(id: $id) {\n ...BackupJobConfig\n }\n }\n }\n": typeof types.ToggleBackupJobConfigDocument,
"\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n status\n jobId\n }\n }\n }\n": typeof types.TriggerBackupJobDocument,
"\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n": typeof types.TriggerBackupJobDocument,
"\n mutation StopBackupJob($id: PrefixedID!) {\n backup {\n stopBackupJob(id: $id) {\n status\n jobId\n }\n }\n }\n": typeof types.StopBackupJobDocument,
"\n mutation InitiateBackup($input: InitiateBackupInput!) {\n backup {\n initiateBackup(input: $input) {\n status\n jobId\n }\n }\n }\n": typeof types.InitiateBackupDocument,
"\n subscription BackupJobProgress($id: PrefixedID!) {\n backupJobProgress(id: $id) {\n id\n stats {\n ...BackupStats\n }\n }\n }\n": typeof types.BackupJobProgressDocument,
@@ -77,7 +78,8 @@ const documents: Documents = {
"\n query ApiKeyMeta {\n apiKeyPossibleRoles\n apiKeyPossiblePermissions {\n resource\n actions\n }\n }\n": types.ApiKeyMetaDocument,
"\n fragment BackupStats on RCloneJobStats {\n bytes\n speed\n eta\n elapsedTime\n percentage\n checks\n deletes\n errors\n fatalError\n lastError\n renames\n retryError\n serverSideCopies\n serverSideCopyBytes\n serverSideMoves\n serverSideMoveBytes\n totalBytes\n totalChecks\n totalTransfers\n transferTime\n transfers\n transferring\n checking\n formattedBytes\n formattedSpeed\n formattedElapsedTime\n formattedEta\n calculatedPercentage\n isActivelyRunning\n isCompleted\n }\n": types.BackupStatsFragmentDoc,
"\n fragment RCloneJob on RCloneJob {\n id\n group\n configId\n finished\n success\n error\n status\n stats {\n ...BackupStats\n }\n }\n": types.RCloneJobFragmentDoc,
"\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n": types.BackupJobConfigFragmentDoc,
"\n fragment PreprocessConfig on PreprocessConfig {\n type\n timeout\n cleanupOnFailure\n zfsConfig {\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n flashConfig {\n flashPath\n includeGitHistory\n additionalPaths\n }\n scriptConfig {\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n }\n": types.PreprocessConfigFragmentDoc,
"\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n backupMode\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n rcloneOptions\n preprocessConfig {\n ...PreprocessConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n": types.BackupJobConfigFragmentDoc,
"\n fragment BackupJobConfigWithCurrentJob on BackupJobConfig {\n ...BackupJobConfig\n currentJob {\n ...RCloneJob\n }\n }\n": types.BackupJobConfigWithCurrentJobFragmentDoc,
"\n query BackupJobs {\n backup {\n id\n jobs {\n ...RCloneJob\n }\n }\n }\n": types.BackupJobsDocument,
"\n query BackupJob($id: PrefixedID!) {\n backupJob(id: $id) {\n ...RCloneJob\n }\n }\n": types.BackupJobDocument,
@@ -89,7 +91,7 @@ const documents: Documents = {
"\n mutation UpdateBackupJobConfig($id: PrefixedID!, $input: UpdateBackupJobConfigInput!) {\n backup {\n updateBackupJobConfig(id: $id, input: $input) {\n ...BackupJobConfig\n }\n }\n }\n": types.UpdateBackupJobConfigDocument,
"\n mutation DeleteBackupJobConfig($id: PrefixedID!) {\n backup {\n deleteBackupJobConfig(id: $id)\n }\n }\n": types.DeleteBackupJobConfigDocument,
"\n mutation ToggleBackupJobConfig($id: PrefixedID!) {\n backup {\n toggleJobConfig(id: $id) {\n ...BackupJobConfig\n }\n }\n }\n": types.ToggleBackupJobConfigDocument,
"\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n status\n jobId\n }\n }\n }\n": types.TriggerBackupJobDocument,
"\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n": types.TriggerBackupJobDocument,
"\n mutation StopBackupJob($id: PrefixedID!) {\n backup {\n stopBackupJob(id: $id) {\n status\n jobId\n }\n }\n }\n": types.StopBackupJobDocument,
"\n mutation InitiateBackup($input: InitiateBackupInput!) {\n backup {\n initiateBackup(input: $input) {\n status\n jobId\n }\n }\n }\n": types.InitiateBackupDocument,
"\n subscription BackupJobProgress($id: PrefixedID!) {\n backupJobProgress(id: $id) {\n id\n stats {\n ...BackupStats\n }\n }\n }\n": types.BackupJobProgressDocument,
@@ -173,7 +175,11 @@ export function graphql(source: "\n fragment RCloneJob on RCloneJob {\n id\n
/**
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function graphql(source: "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n"): (typeof documents)["\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n"];
export function graphql(source: "\n fragment PreprocessConfig on PreprocessConfig {\n type\n timeout\n cleanupOnFailure\n zfsConfig {\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n flashConfig {\n flashPath\n includeGitHistory\n additionalPaths\n }\n scriptConfig {\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n }\n"): (typeof documents)["\n fragment PreprocessConfig on PreprocessConfig {\n type\n timeout\n cleanupOnFailure\n zfsConfig {\n poolName\n datasetName\n snapshotPrefix\n cleanupSnapshots\n retainSnapshots\n }\n flashConfig {\n flashPath\n includeGitHistory\n additionalPaths\n }\n scriptConfig {\n scriptPath\n scriptArgs\n workingDirectory\n environment\n outputPath\n }\n }\n"];
/**
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function graphql(source: "\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n backupMode\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n rcloneOptions\n preprocessConfig {\n ...PreprocessConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n"): (typeof documents)["\n fragment BackupJobConfig on BackupJobConfig {\n id\n name\n backupMode\n sourcePath\n remoteName\n destinationPath\n schedule\n enabled\n rcloneOptions\n preprocessConfig {\n ...PreprocessConfig\n }\n createdAt\n updatedAt\n lastRunAt\n lastRunStatus\n currentJobId\n }\n"];
/**
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
@@ -221,7 +227,7 @@ export function graphql(source: "\n mutation ToggleBackupJobConfig($id: Prefixe
/**
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
export function graphql(source: "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n status\n jobId\n }\n }\n }\n"): (typeof documents)["\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n status\n jobId\n }\n }\n }\n"];
export function graphql(source: "\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n"): (typeof documents)["\n mutation TriggerBackupJob($id: PrefixedID!) {\n backup {\n triggerJob(id: $id) {\n jobId\n }\n }\n }\n"];
/**
* The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients.
*/
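
Since status is no longer selected on triggerJob, callers must rely on jobId and the progress subscription instead. A usage sketch, assuming @vue/apollo-composable is the client wrapper in this codebase (the import path is illustrative):

import { useMutation } from '@vue/apollo-composable';

// Illustrative path: TRIGGER_BACKUP_JOB_MUTATION is the generated document shown above.
import { TRIGGER_BACKUP_JOB_MUTATION } from '~/components/Backup/backup.query';

const { mutate: triggerJob } = useMutation(TRIGGER_BACKUP_JOB_MUTATION);

async function startBackup(configId: string) {
    const result = await triggerJob({ id: configId });
    // The payload now carries only jobId; track progress via the
    // BackupJobProgress subscription rather than a returned status.
    return result?.data?.backup?.triggerJob?.jobId;
}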

File diff suppressed because one or more lines are too long