diff --git a/agents/patchmon-agent-linux-386 b/agents/patchmon-agent-linux-386 index b4947aa..c673215 100755 Binary files a/agents/patchmon-agent-linux-386 and b/agents/patchmon-agent-linux-386 differ diff --git a/agents/patchmon-agent-linux-amd64 b/agents/patchmon-agent-linux-amd64 index e26e4e7..f129b20 100755 Binary files a/agents/patchmon-agent-linux-amd64 and b/agents/patchmon-agent-linux-amd64 differ diff --git a/agents/patchmon-agent-linux-arm b/agents/patchmon-agent-linux-arm index 7ab958c..43fb562 100755 Binary files a/agents/patchmon-agent-linux-arm and b/agents/patchmon-agent-linux-arm differ diff --git a/agents/patchmon-agent-linux-arm64 b/agents/patchmon-agent-linux-arm64 index 4b479b0..02c896e 100755 Binary files a/agents/patchmon-agent-linux-arm64 and b/agents/patchmon-agent-linux-arm64 differ diff --git a/agents/patchmon_install.sh b/agents/patchmon_install.sh index 82a0b4b..315f117 100644 --- a/agents/patchmon_install.sh +++ b/agents/patchmon_install.sh @@ -356,6 +356,7 @@ api_version: "v1" credentials_file: "/etc/patchmon/credentials.yml" log_file: "/etc/patchmon/logs/patchmon-agent.log" log_level: "info" +skip_ssl_verify: ${SKIP_SSL_VERIFY:-false} EOF # Create credentials file diff --git a/backend/env.example b/backend/env.example index 5a9a6a7..e5a0eab 100644 --- a/backend/env.example +++ b/backend/env.example @@ -3,6 +3,13 @@ DATABASE_URL="postgresql://patchmon_user:your-password-here@localhost:5432/patch PM_DB_CONN_MAX_ATTEMPTS=30 PM_DB_CONN_WAIT_INTERVAL=2 +# Database Connection Pool Configuration (Prisma) +DB_CONNECTION_LIMIT=30 # Maximum connections per instance (default: 30) +DB_POOL_TIMEOUT=20 # Seconds to wait for available connection (default: 20) +DB_CONNECT_TIMEOUT=10 # Seconds to wait for initial connection (default: 10) +DB_IDLE_TIMEOUT=300 # Seconds before closing idle connections (default: 300) +DB_MAX_LIFETIME=1800 # Maximum lifetime of a connection in seconds (default: 1800) + # JWT Configuration JWT_SECRET=your-secure-random-secret-key-change-this-in-production JWT_EXPIRES_IN=1h diff --git a/backend/package.json b/backend/package.json index 9108d93..4387c75 100644 --- a/backend/package.json +++ b/backend/package.json @@ -1,6 +1,6 @@ { "name": "patchmon-backend", - "version": "1.3.0", + "version": "1.3.1", "description": "Backend API for Linux Patch Monitoring System", "license": "AGPL-3.0", "main": "src/server.js", diff --git a/backend/prisma/migrations/20251026_add_color_theme_to_settings/migration.sql b/backend/prisma/migrations/20251026_add_color_theme_to_settings/migration.sql new file mode 100644 index 0000000..ac7afc8 --- /dev/null +++ b/backend/prisma/migrations/20251026_add_color_theme_to_settings/migration.sql @@ -0,0 +1,4 @@ +-- AlterTable +-- Add color_theme field to settings table for customizable app theming +ALTER TABLE "settings" ADD COLUMN "color_theme" TEXT NOT NULL DEFAULT 'default'; + diff --git a/backend/prisma/migrations/20251026_add_metrics_telemetry/migration.sql b/backend/prisma/migrations/20251026_add_metrics_telemetry/migration.sql new file mode 100644 index 0000000..0cee82a --- /dev/null +++ b/backend/prisma/migrations/20251026_add_metrics_telemetry/migration.sql @@ -0,0 +1,14 @@ +-- AddMetricsTelemetry +-- Add anonymous metrics and telemetry fields to settings table + +-- Add metrics fields to settings table +ALTER TABLE "settings" ADD COLUMN "metrics_enabled" BOOLEAN NOT NULL DEFAULT true; +ALTER TABLE "settings" ADD COLUMN "metrics_anonymous_id" TEXT; +ALTER TABLE "settings" ADD COLUMN "metrics_last_sent" TIMESTAMP(3); + 
+-- Generate UUID for existing records (if any exist) +-- This will use PostgreSQL's gen_random_uuid() function +UPDATE "settings" +SET "metrics_anonymous_id" = gen_random_uuid()::text +WHERE "metrics_anonymous_id" IS NULL; + diff --git a/backend/prisma/schema.prisma b/backend/prisma/schema.prisma index 241b3f6..cb3825e 100644 --- a/backend/prisma/schema.prisma +++ b/backend/prisma/schema.prisma @@ -170,27 +170,31 @@ model role_permissions { } model settings { - id String @id - server_url String @default("http://localhost:3001") - server_protocol String @default("http") - server_host String @default("localhost") - server_port Int @default(3001) - created_at DateTime @default(now()) - updated_at DateTime - update_interval Int @default(60) - auto_update Boolean @default(false) - github_repo_url String @default("https://github.com/PatchMon/PatchMon.git") - ssh_key_path String? - repository_type String @default("public") - last_update_check DateTime? - latest_version String? - update_available Boolean @default(false) - signup_enabled Boolean @default(false) - default_user_role String @default("user") - ignore_ssl_self_signed Boolean @default(false) - logo_dark String? @default("/assets/logo_dark.png") - logo_light String? @default("/assets/logo_light.png") - favicon String? @default("/assets/logo_square.svg") + id String @id + server_url String @default("http://localhost:3001") + server_protocol String @default("http") + server_host String @default("localhost") + server_port Int @default(3001) + created_at DateTime @default(now()) + updated_at DateTime + update_interval Int @default(60) + auto_update Boolean @default(false) + github_repo_url String @default("https://github.com/PatchMon/PatchMon.git") + ssh_key_path String? + repository_type String @default("public") + last_update_check DateTime? + latest_version String? + update_available Boolean @default(false) + signup_enabled Boolean @default(false) + default_user_role String @default("user") + ignore_ssl_self_signed Boolean @default(false) + logo_dark String? @default("/assets/logo_dark.png") + logo_light String? @default("/assets/logo_light.png") + favicon String? @default("/assets/logo_square.svg") + metrics_enabled Boolean @default(true) + metrics_anonymous_id String? + metrics_last_sent DateTime? 
+ color_theme String @default("default") } model update_history { diff --git a/backend/src/config/prisma.js b/backend/src/config/prisma.js index 6908c2a..3a71075 100644 --- a/backend/src/config/prisma.js +++ b/backend/src/config/prisma.js @@ -16,12 +16,28 @@ function getOptimizedDatabaseUrl() { // Parse the URL const url = new URL(originalUrl); - // Add connection pooling parameters for multiple instances - url.searchParams.set("connection_limit", "5"); // Reduced from default 10 - url.searchParams.set("pool_timeout", "10"); // 10 seconds - url.searchParams.set("connect_timeout", "10"); // 10 seconds - url.searchParams.set("idle_timeout", "300"); // 5 minutes - url.searchParams.set("max_lifetime", "1800"); // 30 minutes + // Add connection pooling parameters - configurable via environment variables + const connectionLimit = process.env.DB_CONNECTION_LIMIT || "30"; + const poolTimeout = process.env.DB_POOL_TIMEOUT || "20"; + const connectTimeout = process.env.DB_CONNECT_TIMEOUT || "10"; + const idleTimeout = process.env.DB_IDLE_TIMEOUT || "300"; + const maxLifetime = process.env.DB_MAX_LIFETIME || "1800"; + + url.searchParams.set("connection_limit", connectionLimit); + url.searchParams.set("pool_timeout", poolTimeout); + url.searchParams.set("connect_timeout", connectTimeout); + url.searchParams.set("idle_timeout", idleTimeout); + url.searchParams.set("max_lifetime", maxLifetime); + + // Log connection pool settings in development/debug mode + if ( + process.env.ENABLE_LOGGING === "true" || + process.env.LOG_LEVEL === "debug" + ) { + console.log( + `[Database Pool] connection_limit=${connectionLimit}, pool_timeout=${poolTimeout}s, connect_timeout=${connectTimeout}s`, + ); + } return url.toString(); } diff --git a/backend/src/routes/automationRoutes.js b/backend/src/routes/automationRoutes.js index f216aff..9d9dad9 100644 --- a/backend/src/routes/automationRoutes.js +++ b/backend/src/routes/automationRoutes.js @@ -218,6 +218,30 @@ router.post( }, ); +// Trigger manual Docker inventory cleanup +router.post( + "/trigger/docker-inventory-cleanup", + authenticateToken, + async (_req, res) => { + try { + const job = await queueManager.triggerDockerInventoryCleanup(); + res.json({ + success: true, + data: { + jobId: job.id, + message: "Docker inventory cleanup triggered successfully", + }, + }); + } catch (error) { + console.error("Error triggering Docker inventory cleanup:", error); + res.status(500).json({ + success: false, + error: "Failed to trigger Docker inventory cleanup", + }); + } + }, +); + // Get queue health status router.get("/health", authenticateToken, async (_req, res) => { try { @@ -274,6 +298,7 @@ router.get("/overview", authenticateToken, async (_req, res) => { queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1), queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1), queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1), + queueManager.getRecentJobs(QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP, 1), queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1), ]); @@ -283,19 +308,22 @@ router.get("/overview", authenticateToken, async (_req, res) => { stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed + stats[QUEUE_NAMES.SESSION_CLEANUP].delayed + stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed + - stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed, + stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed + + stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].delayed, runningTasks: stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active + stats[QUEUE_NAMES.SESSION_CLEANUP].active + 
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active + - stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active, + stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active + + stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].active, failedTasks: stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed + stats[QUEUE_NAMES.SESSION_CLEANUP].failed + stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed + - stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed, + stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed + + stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].failed, totalAutomations: Object.values(stats).reduce((sum, queueStats) => { return ( @@ -375,10 +403,11 @@ router.get("/overview", authenticateToken, async (_req, res) => { stats: stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP], }, { - name: "Collect Host Statistics", - queue: QUEUE_NAMES.AGENT_COMMANDS, - description: "Collects package statistics from connected agents only", - schedule: `Every ${settings.update_interval} minutes (Agent-driven)`, + name: "Docker Inventory Cleanup", + queue: QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP, + description: + "Removes Docker containers and images for non-existent hosts", + schedule: "Daily at 4 AM", lastRun: recentJobs[4][0]?.finishedOn ? new Date(recentJobs[4][0].finishedOn).toLocaleString() : "Never", @@ -388,6 +417,22 @@ router.get("/overview", authenticateToken, async (_req, res) => { : recentJobs[4][0] ? "Success" : "Never run", + stats: stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP], + }, + { + name: "Collect Host Statistics", + queue: QUEUE_NAMES.AGENT_COMMANDS, + description: "Collects package statistics from connected agents only", + schedule: `Every ${settings.update_interval} minutes (Agent-driven)`, + lastRun: recentJobs[5][0]?.finishedOn + ? new Date(recentJobs[5][0].finishedOn).toLocaleString() + : "Never", + lastRunTimestamp: recentJobs[5][0]?.finishedOn || 0, + status: recentJobs[5][0]?.failedReason + ? "Failed" + : recentJobs[5][0] + ? 
"Success" + : "Never run", stats: stats[QUEUE_NAMES.AGENT_COMMANDS], }, ].sort((a, b) => { diff --git a/backend/src/routes/dashboardRoutes.js b/backend/src/routes/dashboardRoutes.js index 386eb91..e995737 100644 --- a/backend/src/routes/dashboardRoutes.js +++ b/backend/src/routes/dashboardRoutes.js @@ -193,11 +193,16 @@ router.get( }, ); -// Get hosts with their update status +// Get hosts with their update status - OPTIMIZED router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => { try { + // Get settings once (outside the loop) + const settings = await prisma.settings.findFirst(); + const updateIntervalMinutes = settings?.update_interval || 60; + const thresholdMinutes = updateIntervalMinutes * 2; + + // Fetch hosts with groups const hosts = await prisma.hosts.findMany({ - // Show all hosts regardless of status select: { id: true, machine_id: true, @@ -223,61 +228,65 @@ router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => { }, }, }, - _count: { - select: { - host_packages: { - where: { - needs_update: true, - }, - }, - }, - }, }, orderBy: { last_update: "desc" }, }); - // Get update counts for each host separately - const hostsWithUpdateInfo = await Promise.all( - hosts.map(async (host) => { - const updatesCount = await prisma.host_packages.count({ - where: { - host_id: host.id, - needs_update: true, - }, - }); + // OPTIMIZATION: Get all package counts in 2 batch queries instead of N*2 queries + const hostIds = hosts.map((h) => h.id); - // Get total packages count for this host - const totalPackagesCount = await prisma.host_packages.count({ - where: { - host_id: host.id, - }, - }); - - // Get the agent update interval setting for stale calculation - const settings = await prisma.settings.findFirst(); - const updateIntervalMinutes = settings?.update_interval || 60; - const thresholdMinutes = updateIntervalMinutes * 2; - - // Calculate effective status based on reporting interval - const isStale = moment(host.last_update).isBefore( - moment().subtract(thresholdMinutes, "minutes"), - ); - let effectiveStatus = host.status; - - // Override status if host hasn't reported within threshold - if (isStale && host.status === "active") { - effectiveStatus = "inactive"; - } - - return { - ...host, - updatesCount, - totalPackagesCount, - isStale, - effectiveStatus, - }; + const [updateCounts, totalCounts] = await Promise.all([ + // Get update counts for all hosts at once + prisma.host_packages.groupBy({ + by: ["host_id"], + where: { + host_id: { in: hostIds }, + needs_update: true, + }, + _count: { id: true }, }), + // Get total counts for all hosts at once + prisma.host_packages.groupBy({ + by: ["host_id"], + where: { + host_id: { in: hostIds }, + }, + _count: { id: true }, + }), + ]); + + // Create lookup maps for O(1) access + const updateCountMap = new Map( + updateCounts.map((item) => [item.host_id, item._count.id]), ); + const totalCountMap = new Map( + totalCounts.map((item) => [item.host_id, item._count.id]), + ); + + // Process hosts with counts from maps (no more DB queries!) 
+ const hostsWithUpdateInfo = hosts.map((host) => { + const updatesCount = updateCountMap.get(host.id) || 0; + const totalPackagesCount = totalCountMap.get(host.id) || 0; + + // Calculate effective status based on reporting interval + const isStale = moment(host.last_update).isBefore( + moment().subtract(thresholdMinutes, "minutes"), + ); + let effectiveStatus = host.status; + + // Override status if host hasn't reported within threshold + if (isStale && host.status === "active") { + effectiveStatus = "inactive"; + } + + return { + ...host, + updatesCount, + totalPackagesCount, + isStale, + effectiveStatus, + }; + }); res.json(hostsWithUpdateInfo); } catch (error) { diff --git a/backend/src/routes/dockerRoutes.js b/backend/src/routes/dockerRoutes.js index 271e33a..db3a4eb 100644 --- a/backend/src/routes/dockerRoutes.js +++ b/backend/src/routes/dockerRoutes.js @@ -522,7 +522,8 @@ router.get("/updates", authenticateToken, async (req, res) => { } }); -// POST /api/v1/docker/collect - Collect Docker data from agent +// POST /api/v1/docker/collect - Collect Docker data from agent (DEPRECATED - kept for backward compatibility) +// New agents should use POST /api/v1/integrations/docker router.post("/collect", async (req, res) => { try { const { apiId, apiKey, containers, images, updates } = req.body; @@ -745,6 +746,322 @@ router.post("/collect", async (req, res) => { } }); +// POST /api/v1/integrations/docker - New integration endpoint for Docker data collection +router.post("/../integrations/docker", async (req, res) => { + try { + const apiId = req.headers["x-api-id"]; + const apiKey = req.headers["x-api-key"]; + const { + containers, + images, + updates, + daemon_info: _daemon_info, + hostname, + machine_id, + agent_version: _agent_version, + } = req.body; + + console.log( + `[Docker Integration] Received data from ${hostname || machine_id}`, + ); + + // Validate API credentials + const host = await prisma.hosts.findFirst({ + where: { api_id: apiId, api_key: apiKey }, + }); + + if (!host) { + console.warn("[Docker Integration] Invalid API credentials"); + return res.status(401).json({ error: "Invalid API credentials" }); + } + + console.log( + `[Docker Integration] Processing for host: ${host.friendly_name}`, + ); + + const now = new Date(); + + // Helper function to validate and parse dates + const parseDate = (dateString) => { + if (!dateString) return now; + const date = new Date(dateString); + return Number.isNaN(date.getTime()) ? 
now : date; + }; + + let containersProcessed = 0; + let imagesProcessed = 0; + let updatesProcessed = 0; + + // Process containers + if (containers && Array.isArray(containers)) { + console.log( + `[Docker Integration] Processing ${containers.length} containers`, + ); + for (const containerData of containers) { + const containerId = uuidv4(); + + // Find or create image + let imageId = null; + if (containerData.image_repository && containerData.image_tag) { + const image = await prisma.docker_images.upsert({ + where: { + repository_tag_image_id: { + repository: containerData.image_repository, + tag: containerData.image_tag, + image_id: containerData.image_id || "unknown", + }, + }, + update: { + last_checked: now, + updated_at: now, + }, + create: { + id: uuidv4(), + repository: containerData.image_repository, + tag: containerData.image_tag, + image_id: containerData.image_id || "unknown", + source: containerData.image_source || "docker-hub", + created_at: parseDate(containerData.created_at), + updated_at: now, + }, + }); + imageId = image.id; + } + + // Upsert container + await prisma.docker_containers.upsert({ + where: { + host_id_container_id: { + host_id: host.id, + container_id: containerData.container_id, + }, + }, + update: { + name: containerData.name, + image_id: imageId, + image_name: containerData.image_name, + image_tag: containerData.image_tag || "latest", + status: containerData.status, + state: containerData.state || containerData.status, + ports: containerData.ports || null, + started_at: containerData.started_at + ? parseDate(containerData.started_at) + : null, + updated_at: now, + last_checked: now, + }, + create: { + id: containerId, + host_id: host.id, + container_id: containerData.container_id, + name: containerData.name, + image_id: imageId, + image_name: containerData.image_name, + image_tag: containerData.image_tag || "latest", + status: containerData.status, + state: containerData.state || containerData.status, + ports: containerData.ports || null, + created_at: parseDate(containerData.created_at), + started_at: containerData.started_at + ? parseDate(containerData.started_at) + : null, + updated_at: now, + }, + }); + containersProcessed++; + } + } + + // Process standalone images + if (images && Array.isArray(images)) { + console.log(`[Docker Integration] Processing ${images.length} images`); + for (const imageData of images) { + await prisma.docker_images.upsert({ + where: { + repository_tag_image_id: { + repository: imageData.repository, + tag: imageData.tag, + image_id: imageData.image_id, + }, + }, + update: { + size_bytes: imageData.size_bytes + ? BigInt(imageData.size_bytes) + : null, + digest: imageData.digest || null, + last_checked: now, + updated_at: now, + }, + create: { + id: uuidv4(), + repository: imageData.repository, + tag: imageData.tag, + image_id: imageData.image_id, + digest: imageData.digest, + size_bytes: imageData.size_bytes + ? 
BigInt(imageData.size_bytes) + : null, + source: imageData.source || "docker-hub", + created_at: parseDate(imageData.created_at), + updated_at: now, + }, + }); + imagesProcessed++; + } + } + + // Process updates + if (updates && Array.isArray(updates)) { + console.log(`[Docker Integration] Processing ${updates.length} updates`); + for (const updateData of updates) { + // Find the image by repository and image_id + const image = await prisma.docker_images.findFirst({ + where: { + repository: updateData.repository, + tag: updateData.current_tag, + image_id: updateData.image_id, + }, + }); + + if (image) { + // Store digest info in changelog_url field as JSON + const digestInfo = JSON.stringify({ + method: "digest_comparison", + current_digest: updateData.current_digest, + available_digest: updateData.available_digest, + }); + + // Upsert the update record + await prisma.docker_image_updates.upsert({ + where: { + image_id_available_tag: { + image_id: image.id, + available_tag: updateData.available_tag, + }, + }, + update: { + updated_at: now, + changelog_url: digestInfo, + severity: "digest_changed", + }, + create: { + id: uuidv4(), + image_id: image.id, + current_tag: updateData.current_tag, + available_tag: updateData.available_tag, + severity: "digest_changed", + changelog_url: digestInfo, + updated_at: now, + }, + }); + updatesProcessed++; + } + } + } + + console.log( + `[Docker Integration] Successfully processed: ${containersProcessed} containers, ${imagesProcessed} images, ${updatesProcessed} updates`, + ); + + res.json({ + message: "Docker data collected successfully", + containers_received: containersProcessed, + images_received: imagesProcessed, + updates_found: updatesProcessed, + }); + } catch (error) { + console.error("[Docker Integration] Error collecting Docker data:", error); + console.error("[Docker Integration] Error stack:", error.stack); + res.status(500).json({ + error: "Failed to collect Docker data", + message: error.message, + details: process.env.NODE_ENV === "development" ? 
error.stack : undefined, + }); + } +}); + +// DELETE /api/v1/docker/containers/:id - Delete a container +router.delete("/containers/:id", authenticateToken, async (req, res) => { + try { + const { id } = req.params; + + // Check if container exists + const container = await prisma.docker_containers.findUnique({ + where: { id }, + }); + + if (!container) { + return res.status(404).json({ error: "Container not found" }); + } + + // Delete the container + await prisma.docker_containers.delete({ + where: { id }, + }); + + console.log(`🗑️ Deleted container: ${container.name} (${id})`); + + res.json({ + success: true, + message: `Container ${container.name} deleted successfully`, + }); + } catch (error) { + console.error("Error deleting container:", error); + res.status(500).json({ error: "Failed to delete container" }); + } +}); + +// DELETE /api/v1/docker/images/:id - Delete an image +router.delete("/images/:id", authenticateToken, async (req, res) => { + try { + const { id } = req.params; + + // Check if image exists + const image = await prisma.docker_images.findUnique({ + where: { id }, + include: { + _count: { + select: { + docker_containers: true, + }, + }, + }, + }); + + if (!image) { + return res.status(404).json({ error: "Image not found" }); + } + + // Check if image is in use by containers + if (image._count.docker_containers > 0) { + return res.status(400).json({ + error: `Cannot delete image: ${image._count.docker_containers} container(s) are using this image`, + containersCount: image._count.docker_containers, + }); + } + + // Delete image updates first + await prisma.docker_image_updates.deleteMany({ + where: { image_id: id }, + }); + + // Delete the image + await prisma.docker_images.delete({ + where: { id }, + }); + + console.log(`🗑️ Deleted image: ${image.repository}:${image.tag} (${id})`); + + res.json({ + success: true, + message: `Image ${image.repository}:${image.tag} deleted successfully`, + }); + } catch (error) { + console.error("Error deleting image:", error); + res.status(500).json({ error: "Failed to delete image" }); + } +}); + // GET /api/v1/docker/agent - Serve the Docker agent installation script router.get("/agent", async (_req, res) => { try { diff --git a/backend/src/routes/hostRoutes.js b/backend/src/routes/hostRoutes.js index 830eb26..7170b07 100644 --- a/backend/src/routes/hostRoutes.js +++ b/backend/src/routes/hostRoutes.js @@ -356,6 +356,26 @@ router.post( }); } catch (error) { console.error("Host creation error:", error); + + // Check if error is related to connection pool exhaustion + if ( + error.message && + (error.message.includes("connection pool") || + error.message.includes("Timed out fetching") || + error.message.includes("pool timeout")) + ) { + console.error("⚠️ DATABASE CONNECTION POOL EXHAUSTED!"); + console.error( + `⚠️ Current limit: DB_CONNECTION_LIMIT=${process.env.DB_CONNECTION_LIMIT || "30"}`, + ); + console.error( + `⚠️ Pool timeout: DB_POOL_TIMEOUT=${process.env.DB_POOL_TIMEOUT || "20"}s`, + ); + console.error( + "⚠️ Suggestion: Increase DB_CONNECTION_LIMIT in your .env file", + ); + } + res.status(500).json({ error: "Failed to create host" }); } }, @@ -786,19 +806,41 @@ router.get("/info", validateApiCredentials, async (req, res) => { // Ping endpoint for health checks (now uses API credentials) router.post("/ping", validateApiCredentials, async (req, res) => { try { - // Update last update timestamp + const now = new Date(); + const lastUpdate = req.hostRecord.last_update; + + // Detect if this is an agent 
startup (first ping or after long absence) + const timeSinceLastUpdate = lastUpdate ? now - lastUpdate : null; + const isStartup = + !timeSinceLastUpdate || timeSinceLastUpdate > 5 * 60 * 1000; // 5 minutes + + // Log agent startup + if (isStartup) { + console.log( + `🚀 Agent startup detected: ${req.hostRecord.friendly_name} (${req.hostRecord.hostname || req.hostRecord.api_id})`, + ); + + // Check if status was previously offline + if (req.hostRecord.status === "offline") { + console.log(`✅ Agent back online: ${req.hostRecord.friendly_name}`); + } + } + + // Update last update timestamp and set status to active await prisma.hosts.update({ where: { id: req.hostRecord.id }, data: { - last_update: new Date(), - updated_at: new Date(), + last_update: now, + updated_at: now, + status: "active", }, }); const response = { message: "Ping successful", - timestamp: new Date().toISOString(), + timestamp: now.toISOString(), friendlyName: req.hostRecord.friendly_name, + agentStartup: isStartup, }; // Check if this is a crontab update trigger @@ -1441,10 +1483,12 @@ router.get("/install", async (req, res) => { // Determine curl flags dynamically from settings (ignore self-signed) let curlFlags = "-s"; + let skipSSLVerify = "false"; try { const settings = await prisma.settings.findFirst(); if (settings && settings.ignore_ssl_self_signed === true) { curlFlags = "-sk"; + skipSSLVerify = "true"; } } catch (_) {} @@ -1454,12 +1498,13 @@ router.get("/install", async (req, res) => { // Get architecture parameter (default to amd64) const architecture = req.query.arch || "amd64"; - // Inject the API credentials, server URL, curl flags, force flag, and architecture into the script + // Inject the API credentials, server URL, curl flags, SSL verify flag, force flag, and architecture into the script const envVars = `#!/bin/bash export PATCHMON_URL="${serverUrl}" export API_ID="${host.api_id}" export API_KEY="${host.api_key}" export CURL_FLAGS="${curlFlags}" +export SKIP_SSL_VERIFY="${skipSSLVerify}" export FORCE_INSTALL="${forceInstall ? 
"true" : "false"}" export ARCHITECTURE="${architecture}" diff --git a/backend/src/routes/integrationRoutes.js b/backend/src/routes/integrationRoutes.js new file mode 100644 index 0000000..fccdf9a --- /dev/null +++ b/backend/src/routes/integrationRoutes.js @@ -0,0 +1,242 @@ +const express = require("express"); +const { getPrismaClient } = require("../config/prisma"); +const { v4: uuidv4 } = require("uuid"); + +const prisma = getPrismaClient(); +const router = express.Router(); + +// POST /api/v1/integrations/docker - Docker data collection endpoint +router.post("/docker", async (req, res) => { + try { + const apiId = req.headers["x-api-id"]; + const apiKey = req.headers["x-api-key"]; + const { + containers, + images, + updates, + daemon_info: _daemon_info, + hostname, + machine_id, + agent_version: _agent_version, + } = req.body; + + console.log( + `[Docker Integration] Received data from ${hostname || machine_id}`, + ); + + // Validate API credentials + const host = await prisma.hosts.findFirst({ + where: { api_id: apiId, api_key: apiKey }, + }); + + if (!host) { + console.warn("[Docker Integration] Invalid API credentials"); + return res.status(401).json({ error: "Invalid API credentials" }); + } + + console.log( + `[Docker Integration] Processing for host: ${host.friendly_name}`, + ); + + const now = new Date(); + + // Helper function to validate and parse dates + const parseDate = (dateString) => { + if (!dateString) return now; + const date = new Date(dateString); + return Number.isNaN(date.getTime()) ? now : date; + }; + + let containersProcessed = 0; + let imagesProcessed = 0; + let updatesProcessed = 0; + + // Process containers + if (containers && Array.isArray(containers)) { + console.log( + `[Docker Integration] Processing ${containers.length} containers`, + ); + for (const containerData of containers) { + const containerId = uuidv4(); + + // Find or create image + let imageId = null; + if (containerData.image_repository && containerData.image_tag) { + const image = await prisma.docker_images.upsert({ + where: { + repository_tag_image_id: { + repository: containerData.image_repository, + tag: containerData.image_tag, + image_id: containerData.image_id || "unknown", + }, + }, + update: { + last_checked: now, + updated_at: now, + }, + create: { + id: uuidv4(), + repository: containerData.image_repository, + tag: containerData.image_tag, + image_id: containerData.image_id || "unknown", + source: containerData.image_source || "docker-hub", + created_at: parseDate(containerData.created_at), + updated_at: now, + }, + }); + imageId = image.id; + } + + // Upsert container + await prisma.docker_containers.upsert({ + where: { + host_id_container_id: { + host_id: host.id, + container_id: containerData.container_id, + }, + }, + update: { + name: containerData.name, + image_id: imageId, + image_name: containerData.image_name, + image_tag: containerData.image_tag || "latest", + status: containerData.status, + state: containerData.state || containerData.status, + ports: containerData.ports || null, + started_at: containerData.started_at + ? 
parseDate(containerData.started_at) + : null, + updated_at: now, + last_checked: now, + }, + create: { + id: containerId, + host_id: host.id, + container_id: containerData.container_id, + name: containerData.name, + image_id: imageId, + image_name: containerData.image_name, + image_tag: containerData.image_tag || "latest", + status: containerData.status, + state: containerData.state || containerData.status, + ports: containerData.ports || null, + created_at: parseDate(containerData.created_at), + started_at: containerData.started_at + ? parseDate(containerData.started_at) + : null, + updated_at: now, + }, + }); + containersProcessed++; + } + } + + // Process standalone images + if (images && Array.isArray(images)) { + console.log(`[Docker Integration] Processing ${images.length} images`); + for (const imageData of images) { + await prisma.docker_images.upsert({ + where: { + repository_tag_image_id: { + repository: imageData.repository, + tag: imageData.tag, + image_id: imageData.image_id, + }, + }, + update: { + size_bytes: imageData.size_bytes + ? BigInt(imageData.size_bytes) + : null, + digest: imageData.digest || null, + last_checked: now, + updated_at: now, + }, + create: { + id: uuidv4(), + repository: imageData.repository, + tag: imageData.tag, + image_id: imageData.image_id, + digest: imageData.digest, + size_bytes: imageData.size_bytes + ? BigInt(imageData.size_bytes) + : null, + source: imageData.source || "docker-hub", + created_at: parseDate(imageData.created_at), + updated_at: now, + }, + }); + imagesProcessed++; + } + } + + // Process updates + if (updates && Array.isArray(updates)) { + console.log(`[Docker Integration] Processing ${updates.length} updates`); + for (const updateData of updates) { + // Find the image by repository and image_id + const image = await prisma.docker_images.findFirst({ + where: { + repository: updateData.repository, + tag: updateData.current_tag, + image_id: updateData.image_id, + }, + }); + + if (image) { + // Store digest info in changelog_url field as JSON + const digestInfo = JSON.stringify({ + method: "digest_comparison", + current_digest: updateData.current_digest, + available_digest: updateData.available_digest, + }); + + // Upsert the update record + await prisma.docker_image_updates.upsert({ + where: { + image_id_available_tag: { + image_id: image.id, + available_tag: updateData.available_tag, + }, + }, + update: { + updated_at: now, + changelog_url: digestInfo, + severity: "digest_changed", + }, + create: { + id: uuidv4(), + image_id: image.id, + current_tag: updateData.current_tag, + available_tag: updateData.available_tag, + severity: "digest_changed", + changelog_url: digestInfo, + updated_at: now, + }, + }); + updatesProcessed++; + } + } + } + + console.log( + `[Docker Integration] Successfully processed: ${containersProcessed} containers, ${imagesProcessed} images, ${updatesProcessed} updates`, + ); + + res.json({ + message: "Docker data collected successfully", + containers_received: containersProcessed, + images_received: imagesProcessed, + updates_found: updatesProcessed, + }); + } catch (error) { + console.error("[Docker Integration] Error collecting Docker data:", error); + console.error("[Docker Integration] Error stack:", error.stack); + res.status(500).json({ + error: "Failed to collect Docker data", + message: error.message, + details: process.env.NODE_ENV === "development" ? 
error.stack : undefined, + }); + } +}); + +module.exports = router; diff --git a/backend/src/routes/metricsRoutes.js b/backend/src/routes/metricsRoutes.js new file mode 100644 index 0000000..6c45279 --- /dev/null +++ b/backend/src/routes/metricsRoutes.js @@ -0,0 +1,148 @@ +const express = require("express"); +const { body, validationResult } = require("express-validator"); +const { v4: uuidv4 } = require("uuid"); +const { authenticateToken } = require("../middleware/auth"); +const { requireManageSettings } = require("../middleware/permissions"); +const { getSettings, updateSettings } = require("../services/settingsService"); +const { queueManager, QUEUE_NAMES } = require("../services/automation"); + +const router = express.Router(); + +// Get metrics settings +router.get("/", authenticateToken, requireManageSettings, async (_req, res) => { + try { + const settings = await getSettings(); + + // Generate anonymous ID if it doesn't exist + if (!settings.metrics_anonymous_id) { + const anonymousId = uuidv4(); + await updateSettings(settings.id, { + metrics_anonymous_id: anonymousId, + }); + settings.metrics_anonymous_id = anonymousId; + } + + res.json({ + metrics_enabled: settings.metrics_enabled ?? true, + metrics_anonymous_id: settings.metrics_anonymous_id, + metrics_last_sent: settings.metrics_last_sent, + }); + } catch (error) { + console.error("Metrics settings fetch error:", error); + res.status(500).json({ error: "Failed to fetch metrics settings" }); + } +}); + +// Update metrics settings +router.put( + "/", + authenticateToken, + requireManageSettings, + [ + body("metrics_enabled") + .isBoolean() + .withMessage("Metrics enabled must be a boolean"), + ], + async (req, res) => { + try { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const { metrics_enabled } = req.body; + const settings = await getSettings(); + + await updateSettings(settings.id, { + metrics_enabled, + }); + + console.log( + `Metrics ${metrics_enabled ? "enabled" : "disabled"} by user`, + ); + + res.json({ + message: "Metrics settings updated successfully", + metrics_enabled, + }); + } catch (error) { + console.error("Metrics settings update error:", error); + res.status(500).json({ error: "Failed to update metrics settings" }); + } + }, +); + +// Regenerate anonymous ID +router.post( + "/regenerate-id", + authenticateToken, + requireManageSettings, + async (_req, res) => { + try { + const settings = await getSettings(); + const newAnonymousId = uuidv4(); + + await updateSettings(settings.id, { + metrics_anonymous_id: newAnonymousId, + }); + + console.log("Anonymous ID regenerated"); + + res.json({ + message: "Anonymous ID regenerated successfully", + metrics_anonymous_id: newAnonymousId, + }); + } catch (error) { + console.error("Anonymous ID regeneration error:", error); + res.status(500).json({ error: "Failed to regenerate anonymous ID" }); + } + }, +); + +// Manually send metrics now +router.post( + "/send-now", + authenticateToken, + requireManageSettings, + async (_req, res) => { + try { + const settings = await getSettings(); + + if (!settings.metrics_enabled) { + return res.status(400).json({ + error: "Metrics are disabled. 
Please enable metrics first.", + }); + } + + // Trigger metrics directly (no queue delay for manual trigger) + const metricsReporting = + queueManager.automations[QUEUE_NAMES.METRICS_REPORTING]; + const result = await metricsReporting.process( + { name: "manual-send" }, + false, + ); + + if (result.success) { + console.log("✅ Manual metrics sent successfully"); + res.json({ + message: "Metrics sent successfully", + data: result, + }); + } else { + console.error("❌ Failed to send metrics:", result); + res.status(500).json({ + error: "Failed to send metrics", + details: result.reason || result.error, + }); + } + } catch (error) { + console.error("Send metrics error:", error); + res.status(500).json({ + error: "Failed to send metrics", + details: error.message, + }); + } + }, +); + +module.exports = router; diff --git a/backend/src/routes/packageRoutes.js b/backend/src/routes/packageRoutes.js index c65aac4..cf25841 100644 --- a/backend/src/routes/packageRoutes.js +++ b/backend/src/routes/packageRoutes.js @@ -101,74 +101,107 @@ router.get("/", async (req, res) => { prisma.packages.count({ where }), ]); - // Get additional stats for each package - const packagesWithStats = await Promise.all( - packages.map(async (pkg) => { - // Build base where clause for this package - const baseWhere = { package_id: pkg.id }; + // OPTIMIZATION: Batch query all stats instead of N individual queries + const packageIds = packages.map((pkg) => pkg.id); - // If host filter is specified, add host filter to all queries - const hostWhere = host ? { ...baseWhere, host_id: host } : baseWhere; - - const [updatesCount, securityCount, packageHosts] = await Promise.all([ - prisma.host_packages.count({ - where: { - ...hostWhere, - needs_update: true, - }, - }), - prisma.host_packages.count({ - where: { - ...hostWhere, - needs_update: true, - is_security_update: true, - }, - }), - prisma.host_packages.findMany({ - where: { - ...hostWhere, - // If host filter is specified, include all packages for that host - // Otherwise, only include packages that need updates - ...(host ? {} : { needs_update: true }), - }, - select: { - hosts: { - select: { - id: true, - friendly_name: true, - hostname: true, - os_type: true, - }, - }, - current_version: true, - available_version: true, - needs_update: true, - is_security_update: true, - }, - take: 10, // Limit to first 10 for performance - }), - ]); - - return { - ...pkg, - packageHostsCount: pkg._count.host_packages, - packageHosts: packageHosts.map((hp) => ({ - hostId: hp.hosts.id, - friendlyName: hp.hosts.friendly_name, - osType: hp.hosts.os_type, - currentVersion: hp.current_version, - availableVersion: hp.available_version, - needsUpdate: hp.needs_update, - isSecurityUpdate: hp.is_security_update, - })), - stats: { - totalInstalls: pkg._count.host_packages, - updatesNeeded: updatesCount, - securityUpdates: securityCount, + // Get all counts and host data in 3 batch queries instead of N*3 queries + const [allUpdatesCounts, allSecurityCounts, allPackageHostsData] = + await Promise.all([ + // Batch count all packages that need updates + prisma.host_packages.groupBy({ + by: ["package_id"], + where: { + package_id: { in: packageIds }, + needs_update: true, + ...(host ? { host_id: host } : {}), }, - }; - }), + _count: { id: true }, + }), + // Batch count all packages with security updates + prisma.host_packages.groupBy({ + by: ["package_id"], + where: { + package_id: { in: packageIds }, + needs_update: true, + is_security_update: true, + ...(host ? 
{ host_id: host } : {}), + }, + _count: { id: true }, + }), + // Batch fetch all host data for packages + prisma.host_packages.findMany({ + where: { + package_id: { in: packageIds }, + ...(host ? { host_id: host } : { needs_update: true }), + }, + select: { + package_id: true, + hosts: { + select: { + id: true, + friendly_name: true, + hostname: true, + os_type: true, + }, + }, + current_version: true, + available_version: true, + needs_update: true, + is_security_update: true, + }, + // Limit to first 10 per package + take: 100, // Increased from package-based limit + }), + ]); + + // Create lookup maps for O(1) access + const updatesCountMap = new Map( + allUpdatesCounts.map((item) => [item.package_id, item._count.id]), ); + const securityCountMap = new Map( + allSecurityCounts.map((item) => [item.package_id, item._count.id]), + ); + const packageHostsMap = new Map(); + + // Group host data by package_id + for (const hp of allPackageHostsData) { + if (!packageHostsMap.has(hp.package_id)) { + packageHostsMap.set(hp.package_id, []); + } + const hosts = packageHostsMap.get(hp.package_id); + hosts.push({ + hostId: hp.hosts.id, + friendlyName: hp.hosts.friendly_name, + osType: hp.hosts.os_type, + currentVersion: hp.current_version, + availableVersion: hp.available_version, + needsUpdate: hp.needs_update, + isSecurityUpdate: hp.is_security_update, + }); + + // Limit to 10 hosts per package + if (hosts.length > 10) { + packageHostsMap.set(hp.package_id, hosts.slice(0, 10)); + } + } + + // Map packages with stats from lookup maps (no more DB queries!) + const packagesWithStats = packages.map((pkg) => { + const updatesCount = updatesCountMap.get(pkg.id) || 0; + const securityCount = securityCountMap.get(pkg.id) || 0; + const packageHosts = packageHostsMap.get(pkg.id) || []; + + return { + ...pkg, + packageHostsCount: pkg._count.host_packages, + packageHosts, + stats: { + totalInstalls: pkg._count.host_packages, + updatesNeeded: updatesCount, + securityUpdates: securityCount, + }, + }; + }); res.json({ packages: packagesWithStats, diff --git a/backend/src/routes/settingsRoutes.js b/backend/src/routes/settingsRoutes.js index d2a782d..a865e0e 100644 --- a/backend/src/routes/settingsRoutes.js +++ b/backend/src/routes/settingsRoutes.js @@ -158,6 +158,7 @@ router.put( logoDark, logoLight, favicon, + colorTheme, } = req.body; // Get current settings to check for update interval changes @@ -189,6 +190,7 @@ router.put( if (logoDark !== undefined) updateData.logo_dark = logoDark; if (logoLight !== undefined) updateData.logo_light = logoLight; if (favicon !== undefined) updateData.favicon = favicon; + if (colorTheme !== undefined) updateData.color_theme = colorTheme; const updatedSettings = await updateSettings( currentSettings.id, diff --git a/backend/src/routes/versionRoutes.js b/backend/src/routes/versionRoutes.js index 72a39ac..23217a1 100644 --- a/backend/src/routes/versionRoutes.js +++ b/backend/src/routes/versionRoutes.js @@ -14,13 +14,16 @@ const router = express.Router(); function getCurrentVersion() { try { const packageJson = require("../../package.json"); - return packageJson?.version || "1.3.0"; + if (!packageJson?.version) { + throw new Error("Version not found in package.json"); + } + return packageJson.version; } catch (packageError) { - console.warn( - "Could not read version from package.json, using fallback:", + console.error( + "Could not read version from package.json:", packageError.message, ); - return "1.3.0"; + return "unknown"; } } diff --git a/backend/src/routes/wsRoutes.js 
b/backend/src/routes/wsRoutes.js index 4cfd78a..86ecf81 100644 --- a/backend/src/routes/wsRoutes.js +++ b/backend/src/routes/wsRoutes.js @@ -11,7 +11,31 @@ const { const router = express.Router(); -// Get WebSocket connection status by api_id (no database access - pure memory lookup) +// Get WebSocket connection status for multiple hosts at once (bulk endpoint) +router.get("/status", authenticateToken, async (req, res) => { + try { + const { apiIds } = req.query; // Comma-separated list of api_ids + const idArray = apiIds ? apiIds.split(",").filter((id) => id.trim()) : []; + + const statusMap = {}; + idArray.forEach((apiId) => { + statusMap[apiId] = getConnectionInfo(apiId); + }); + + res.json({ + success: true, + data: statusMap, + }); + } catch (error) { + console.error("Error fetching bulk WebSocket status:", error); + res.status(500).json({ + success: false, + error: "Failed to fetch WebSocket status", + }); + } +}); + +// Get WebSocket connection status by api_id (single endpoint) router.get("/status/:apiId", authenticateToken, async (req, res) => { try { const { apiId } = req.params; diff --git a/backend/src/server.js b/backend/src/server.js index d13d3de..81bbc6d 100644 --- a/backend/src/server.js +++ b/backend/src/server.js @@ -66,8 +66,10 @@ const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes"); const gethomepageRoutes = require("./routes/gethomepageRoutes"); const automationRoutes = require("./routes/automationRoutes"); const dockerRoutes = require("./routes/dockerRoutes"); +const integrationRoutes = require("./routes/integrationRoutes"); const wsRoutes = require("./routes/wsRoutes"); const agentVersionRoutes = require("./routes/agentVersionRoutes"); +const metricsRoutes = require("./routes/metricsRoutes"); const { initSettings } = require("./services/settingsService"); const { queueManager } = require("./services/automation"); const { authenticateToken, requireAdmin } = require("./middleware/auth"); @@ -471,8 +473,10 @@ app.use( app.use(`/api/${apiVersion}/gethomepage`, gethomepageRoutes); app.use(`/api/${apiVersion}/automation`, automationRoutes); app.use(`/api/${apiVersion}/docker`, dockerRoutes); +app.use(`/api/${apiVersion}/integrations`, integrationRoutes); app.use(`/api/${apiVersion}/ws`, wsRoutes); app.use(`/api/${apiVersion}/agent`, agentVersionRoutes); +app.use(`/api/${apiVersion}/metrics`, metricsRoutes); // Bull Board - will be populated after queue manager initializes let bullBoardRouter = null; @@ -1198,6 +1202,15 @@ async function startServer() { initAgentWs(server, prisma); await agentVersionService.initialize(); + // Send metrics on startup (silent - no console output) + try { + const metricsReporting = + queueManager.automations[QUEUE_NAMES.METRICS_REPORTING]; + await metricsReporting.sendSilent(); + } catch (_error) { + // Silent failure - don't block server startup if metrics fail + } + server.listen(PORT, () => { if (process.env.ENABLE_LOGGING === "true") { logger.info(`Server running on port ${PORT}`); diff --git a/backend/src/services/agentVersionService.js b/backend/src/services/agentVersionService.js index 8187cb9..bd2a53a 100644 --- a/backend/src/services/agentVersionService.js +++ b/backend/src/services/agentVersionService.js @@ -428,26 +428,29 @@ class AgentVersionService { async getVersionInfo() { let hasUpdate = false; let updateStatus = "unknown"; - let effectiveLatestVersion = this.currentVersion; // Always use local version if available - // If we have a local version, use it as the latest regardless of GitHub - if 
(this.currentVersion) { - effectiveLatestVersion = this.currentVersion; + // Latest version should ALWAYS come from GitHub, not from local binaries + // currentVersion = what's installed locally + // latestVersion = what's available on GitHub + if (this.latestVersion) { + console.log(`📦 Latest version from GitHub: ${this.latestVersion}`); + } else { console.log( - `🔄 Using local agent version ${this.currentVersion} as latest`, - ); - } else if (this.latestVersion) { - // Fallback to GitHub version only if no local version - effectiveLatestVersion = this.latestVersion; - console.log( - `🔄 No local version found, using GitHub version ${this.latestVersion}`, + `⚠️ No GitHub release version available (API may be unavailable)`, ); } - if (this.currentVersion && effectiveLatestVersion) { + if (this.currentVersion) { + console.log(`💾 Current local agent version: ${this.currentVersion}`); + } else { + console.log(`⚠️ No local agent binary found`); + } + + // Determine update status by comparing current vs latest (from GitHub) + if (this.currentVersion && this.latestVersion) { const comparison = compareVersions( this.currentVersion, - effectiveLatestVersion, + this.latestVersion, ); if (comparison < 0) { hasUpdate = true; @@ -459,25 +462,25 @@ class AgentVersionService { hasUpdate = false; updateStatus = "up-to-date"; } - } else if (effectiveLatestVersion && !this.currentVersion) { + } else if (this.latestVersion && !this.currentVersion) { hasUpdate = true; updateStatus = "no-agent"; - } else if (this.currentVersion && !effectiveLatestVersion) { + } else if (this.currentVersion && !this.latestVersion) { // We have a current version but no latest version (GitHub API unavailable) hasUpdate = false; updateStatus = "github-unavailable"; - } else if (!this.currentVersion && !effectiveLatestVersion) { + } else if (!this.currentVersion && !this.latestVersion) { updateStatus = "no-data"; } return { currentVersion: this.currentVersion, - latestVersion: effectiveLatestVersion, + latestVersion: this.latestVersion, // Always return GitHub version, not local hasUpdate: hasUpdate, updateStatus: updateStatus, lastChecked: this.lastChecked, supportedArchitectures: this.supportedArchitectures, - status: effectiveLatestVersion ? "ready" : "no-releases", + status: this.latestVersion ? 
"ready" : "no-releases", }; } diff --git a/backend/src/services/agentWs.js b/backend/src/services/agentWs.js index 9b28e81..1d7fc61 100644 --- a/backend/src/services/agentWs.js +++ b/backend/src/services/agentWs.js @@ -99,8 +99,22 @@ function init(server, prismaClient) { // Notify subscribers of connection notifyConnectionChange(apiId, true); - ws.on("message", () => { - // Currently we don't need to handle agent->server messages + ws.on("message", async (data) => { + // Handle incoming messages from agent (e.g., Docker status updates) + try { + const message = JSON.parse(data.toString()); + + if (message.type === "docker_status") { + // Handle Docker container status events + await handleDockerStatusEvent(apiId, message); + } + // Add more message types here as needed + } catch (err) { + console.error( + `[agent-ws] error parsing message from ${apiId}:`, + err, + ); + } }); ws.on("close", () => { @@ -255,6 +269,62 @@ function subscribeToConnectionChanges(apiId, callback) { }; } +// Handle Docker container status events from agent +async function handleDockerStatusEvent(apiId, message) { + try { + const { event: _event, container_id, name, status, timestamp } = message; + + console.log( + `[Docker Event] ${apiId}: Container ${name} (${container_id}) - ${status}`, + ); + + // Find the host + const host = await prisma.hosts.findUnique({ + where: { api_id: apiId }, + }); + + if (!host) { + console.error(`[Docker Event] Host not found for api_id: ${apiId}`); + return; + } + + // Update container status in database + const container = await prisma.docker_containers.findUnique({ + where: { + host_id_container_id: { + host_id: host.id, + container_id: container_id, + }, + }, + }); + + if (container) { + await prisma.docker_containers.update({ + where: { id: container.id }, + data: { + status: status, + state: status, + updated_at: new Date(timestamp || Date.now()), + last_checked: new Date(), + }, + }); + + console.log( + `[Docker Event] Updated container ${name} status to ${status}`, + ); + } else { + console.log( + `[Docker Event] Container ${name} not found in database (may be new)`, + ); + } + + // TODO: Broadcast to connected dashboard clients via SSE or WebSocket + // This would notify the frontend UI in real-time + } catch (error) { + console.error(`[Docker Event] Error handling Docker status event:`, error); + } +} + module.exports = { init, broadcastSettingsUpdate, diff --git a/backend/src/services/automation/dockerInventoryCleanup.js b/backend/src/services/automation/dockerInventoryCleanup.js new file mode 100644 index 0000000..943bc9e --- /dev/null +++ b/backend/src/services/automation/dockerInventoryCleanup.js @@ -0,0 +1,164 @@ +const { prisma } = require("./shared/prisma"); + +/** + * Docker Inventory Cleanup Automation + * Removes Docker containers and images for hosts that no longer exist + */ +class DockerInventoryCleanup { + constructor(queueManager) { + this.queueManager = queueManager; + this.queueName = "docker-inventory-cleanup"; + } + + /** + * Process Docker inventory cleanup job + */ + async process(_job) { + const startTime = Date.now(); + console.log("๐Ÿงน Starting Docker inventory cleanup..."); + + try { + // Step 1: Find and delete orphaned containers (containers for non-existent hosts) + const orphanedContainers = await prisma.docker_containers.findMany({ + where: { + host_id: { + // Find containers where the host doesn't exist + notIn: await prisma.hosts + .findMany({ select: { id: true } }) + .then((hosts) => hosts.map((h) => h.id)), + }, + }, + }); + + let 
deletedContainersCount = 0; + const deletedContainers = []; + + for (const container of orphanedContainers) { + try { + await prisma.docker_containers.delete({ + where: { id: container.id }, + }); + deletedContainersCount++; + deletedContainers.push({ + id: container.id, + container_id: container.container_id, + name: container.name, + image_name: container.image_name, + host_id: container.host_id, + }); + console.log( + `🗑️ Deleted orphaned container: ${container.name} (host_id: ${container.host_id})`, + ); + } catch (deleteError) { + console.error( + `❌ Failed to delete container ${container.id}:`, + deleteError.message, + ); + } + } + + // Step 2: Find and delete orphaned images (images with no containers using them) + const orphanedImages = await prisma.docker_images.findMany({ + where: { + docker_containers: { + none: {}, + }, + }, + include: { + _count: { + select: { + docker_containers: true, + docker_image_updates: true, + }, + }, + }, + }); + + let deletedImagesCount = 0; + const deletedImages = []; + + for (const image of orphanedImages) { + try { + // First delete any image updates associated with this image + if (image._count.docker_image_updates > 0) { + await prisma.docker_image_updates.deleteMany({ + where: { image_id: image.id }, + }); + } + + // Then delete the image itself + await prisma.docker_images.delete({ + where: { id: image.id }, + }); + deletedImagesCount++; + deletedImages.push({ + id: image.id, + repository: image.repository, + tag: image.tag, + image_id: image.image_id, + }); + console.log( + `🗑️ Deleted orphaned image: ${image.repository}:${image.tag}`, + ); + } catch (deleteError) { + console.error( + `❌ Failed to delete image ${image.id}:`, + deleteError.message, + ); + } + } + + const executionTime = Date.now() - startTime; + console.log( + `✅ Docker inventory cleanup completed in ${executionTime}ms - Deleted ${deletedContainersCount} containers and ${deletedImagesCount} images`, + ); + + return { + success: true, + deletedContainersCount, + deletedImagesCount, + deletedContainers, + deletedImages, + executionTime, + }; + } catch (error) { + const executionTime = Date.now() - startTime; + console.error( + `❌ Docker inventory cleanup failed after ${executionTime}ms:`, + error.message, + ); + throw error; + } + } + + /** + * Schedule recurring Docker inventory cleanup (daily at 4 AM) + */ + async schedule() { + const job = await this.queueManager.queues[this.queueName].add( + "docker-inventory-cleanup", + {}, + { + repeat: { cron: "0 4 * * *" }, // Daily at 4 AM + jobId: "docker-inventory-cleanup-recurring", + }, + ); + console.log("✅ Docker inventory cleanup scheduled"); + return job; + } + + /** + * Trigger manual Docker inventory cleanup + */ + async triggerManual() { + const job = await this.queueManager.queues[this.queueName].add( + "docker-inventory-cleanup-manual", + {}, + { priority: 1 }, + ); + console.log("✅ Manual Docker inventory cleanup triggered"); + return job; + } +} + +module.exports = DockerInventoryCleanup; diff --git a/backend/src/services/automation/githubUpdateCheck.js b/backend/src/services/automation/githubUpdateCheck.js index 8148d0c..2f5a3d4 100644 --- a/backend/src/services/automation/githubUpdateCheck.js +++ b/backend/src/services/automation/githubUpdateCheck.js @@ -52,17 +52,24 @@ class GitHubUpdateCheck { } // Read version from package.json - let currentVersion = "1.3.0"; // fallback + let currentVersion = null; try { const packageJson = require("../../../package.json"); if (packageJson?.version) { 
currentVersion = packageJson.version; } } catch (packageError) { - console.warn( + console.error( "Could not read version from package.json:", packageError.message, ); + throw new Error( + "Could not determine current version from package.json", + ); + } + + if (!currentVersion) { + throw new Error("Version not found in package.json"); } const isUpdateAvailable = diff --git a/backend/src/services/automation/index.js b/backend/src/services/automation/index.js index b8c670c..ff40751 100644 --- a/backend/src/services/automation/index.js +++ b/backend/src/services/automation/index.js @@ -8,6 +8,8 @@ const GitHubUpdateCheck = require("./githubUpdateCheck"); const SessionCleanup = require("./sessionCleanup"); const OrphanedRepoCleanup = require("./orphanedRepoCleanup"); const OrphanedPackageCleanup = require("./orphanedPackageCleanup"); +const DockerInventoryCleanup = require("./dockerInventoryCleanup"); +const MetricsReporting = require("./metricsReporting"); // Queue names const QUEUE_NAMES = { @@ -15,6 +17,8 @@ const QUEUE_NAMES = { SESSION_CLEANUP: "session-cleanup", ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup", ORPHANED_PACKAGE_CLEANUP: "orphaned-package-cleanup", + DOCKER_INVENTORY_CLEANUP: "docker-inventory-cleanup", + METRICS_REPORTING: "metrics-reporting", AGENT_COMMANDS: "agent-commands", }; @@ -91,6 +95,11 @@ class QueueManager { new OrphanedRepoCleanup(this); this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] = new OrphanedPackageCleanup(this); + this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP] = + new DockerInventoryCleanup(this); + this.automations[QUEUE_NAMES.METRICS_REPORTING] = new MetricsReporting( + this, + ); console.log("✅ All automation classes initialized"); } @@ -149,6 +158,24 @@ class QueueManager { workerOptions, ); + // Docker Inventory Cleanup Worker + this.workers[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP] = new Worker( + QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP, + this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].process.bind( + this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP], + ), + workerOptions, + ); + + // Metrics Reporting Worker + this.workers[QUEUE_NAMES.METRICS_REPORTING] = new Worker( + QUEUE_NAMES.METRICS_REPORTING, + this.automations[QUEUE_NAMES.METRICS_REPORTING].process.bind( + this.automations[QUEUE_NAMES.METRICS_REPORTING], + ), + workerOptions, + ); + // Agent Commands Worker this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker( QUEUE_NAMES.AGENT_COMMANDS, @@ -205,6 +232,8 @@ class QueueManager { await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule(); await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule(); await this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].schedule(); + await this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].schedule(); + await this.automations[QUEUE_NAMES.METRICS_REPORTING].schedule(); } /** @@ -228,6 +257,16 @@ class QueueManager { ].triggerManual(); } + async triggerDockerInventoryCleanup() { + return this.automations[ + QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP + ].triggerManual(); + } + + async triggerMetricsReporting() { + return this.automations[QUEUE_NAMES.METRICS_REPORTING].triggerManual(); + } + /** * Get queue statistics */ diff --git a/backend/src/services/automation/metricsReporting.js b/backend/src/services/automation/metricsReporting.js new file mode 100644 index 0000000..3ebfa52 --- /dev/null +++ b/backend/src/services/automation/metricsReporting.js @@ -0,0 +1,172 @@ +const axios = require("axios"); +const { prisma } = require("./shared/prisma"); +const { 
updateSettings } = require("../../services/settingsService"); + +const METRICS_API_URL = + process.env.METRICS_API_URL || "https://metrics.patchmon.cloud"; + +/** + * Metrics Reporting Automation + * Sends anonymous usage metrics every 24 hours + */ +class MetricsReporting { + constructor(queueManager) { + this.queueManager = queueManager; + this.queueName = "metrics-reporting"; + } + + /** + * Process metrics reporting job + */ + async process(_job, silent = false) { + const startTime = Date.now(); + if (!silent) console.log("📊 Starting metrics reporting..."); + + try { + // Fetch fresh settings directly from database (bypass cache) + const settings = await prisma.settings.findFirst({ + orderBy: { updated_at: "desc" }, + }); + + // Check if metrics are enabled + if (settings.metrics_enabled !== true) { + if (!silent) console.log("📊 Metrics reporting is disabled"); + return { success: false, reason: "disabled" }; + } + + // Check if we have an anonymous ID + if (!settings.metrics_anonymous_id) { + if (!silent) console.log("📊 No anonymous ID found, skipping metrics"); + return { success: false, reason: "no_id" }; + } + + // Get host count + const hostCount = await prisma.hosts.count(); + + // Get version + const packageJson = require("../../../package.json"); + const version = packageJson.version; + + // Prepare metrics data + const metricsData = { + anonymous_id: settings.metrics_anonymous_id, + host_count: hostCount, + version, + }; + + if (!silent) + console.log( + `📊 Sending metrics: ${hostCount} hosts, version ${version}`, + ); + + // Send to metrics API + try { + const response = await axios.post( + `${METRICS_API_URL}/metrics/submit`, + metricsData, + { + timeout: 10000, + headers: { + "Content-Type": "application/json", + }, + }, + ); + + // Update last sent timestamp + await updateSettings(settings.id, { + metrics_last_sent: new Date(), + }); + + const executionTime = Date.now() - startTime; + if (!silent) + console.log( + `✅ Metrics sent successfully in ${executionTime}ms:`, + response.data, + ); + + return { + success: true, + data: response.data, + hostCount, + version, + executionTime, + }; + } catch (apiError) { + const executionTime = Date.now() - startTime; + if (!silent) + console.error( + `❌ Failed to send metrics to API after ${executionTime}ms:`, + apiError.message, + ); + return { + success: false, + reason: "api_error", + error: apiError.message, + executionTime, + }; + } + } catch (error) { + const executionTime = Date.now() - startTime; + if (!silent) + console.error( + `❌ Error in metrics reporting after ${executionTime}ms:`, + error.message, + ); + // Don't throw on silent mode, just return failure + if (silent) { + return { + success: false, + reason: "error", + error: error.message, + executionTime, + }; + } + throw error; + } + } + + /** + * Schedule recurring metrics reporting (daily at 2 AM) + */ + async schedule() { + const job = await this.queueManager.queues[this.queueName].add( + "metrics-reporting", + {}, + { + repeat: { cron: "0 2 * * *" }, // Daily at 2 AM + jobId: "metrics-reporting-recurring", + }, + ); + console.log("✅ Metrics reporting scheduled (daily at 2 AM)"); + return job; + } + + /** + * Trigger manual metrics reporting + */ + async triggerManual() { + const job = await this.queueManager.queues[this.queueName].add( + "metrics-reporting-manual", + {}, + { priority: 1 }, + ); + console.log("✅ Manual metrics reporting triggered"); + return job; + } + + /** + * Send metrics immediately (silent mode) + * Used for automatic 
sending on server startup + */ + async sendSilent() { + try { + const result = await this.process({ name: "startup-silent" }, true); + return result; + } catch (error) { + // Silent failure on startup + return { success: false, reason: "error", error: error.message }; + } + } +} + +module.exports = MetricsReporting; diff --git a/backend/src/services/automation/shared/utils.js b/backend/src/services/automation/shared/utils.js index 87a7f16..0b877c0 100644 --- a/backend/src/services/automation/shared/utils.js +++ b/backend/src/services/automation/shared/utils.js @@ -33,7 +33,8 @@ async function checkPublicRepo(owner, repo) { try { const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`; - let currentVersion = "1.3.0"; // fallback + // Get current version for User-Agent (or use generic if unavailable) + let currentVersion = "unknown"; try { const packageJson = require("../../../package.json"); if (packageJson?.version) { @@ -41,7 +42,7 @@ async function checkPublicRepo(owner, repo) { } } catch (packageError) { console.warn( - "Could not read version from package.json for User-Agent, using fallback:", + "Could not read version from package.json for User-Agent:", packageError.message, ); } diff --git a/biome.json b/biome.json index abe98bd..29a6487 100644 --- a/biome.json +++ b/biome.json @@ -1,10 +1,13 @@ { - "$schema": "https://biomejs.dev/schemas/2.2.4/schema.json", + "$schema": "https://biomejs.dev/schemas/2.3.0/schema.json", "vcs": { "enabled": true, "clientKind": "git", "useIgnoreFile": true }, + "files": { + "includes": ["**", "!**/*.css"] + }, "formatter": { "enabled": true }, diff --git a/docker/README.md b/docker/README.md index 952b0a5..32409c4 100644 --- a/docker/README.md +++ b/docker/README.md @@ -136,6 +136,24 @@ When you do this, updating to a new version requires manually updating the image | `PM_DB_CONN_MAX_ATTEMPTS` | Maximum database connection attempts | `30` | | `PM_DB_CONN_WAIT_INTERVAL` | Wait interval between connection attempts in seconds | `2` | +##### Database Connection Pool Configuration (Prisma) + +| Variable | Description | Default | +| --------------------- | ---------------------------------------------------------- | ------- | +| `DB_CONNECTION_LIMIT` | Maximum number of database connections per instance | `30` | +| `DB_POOL_TIMEOUT` | Seconds to wait for an available connection before timeout | `20` | +| `DB_CONNECT_TIMEOUT` | Seconds to wait for initial database connection | `10` | +| `DB_IDLE_TIMEOUT` | Seconds before closing idle connections | `300` | +| `DB_MAX_LIFETIME` | Maximum lifetime of a connection in seconds | `1800` | + +> [!TIP] +> The connection pool limit should be adjusted based on your deployment size: +> - **Small deployment (1-10 hosts)**: `DB_CONNECTION_LIMIT=15` is sufficient +> - **Medium deployment (10-50 hosts)**: `DB_CONNECTION_LIMIT=30` (default) +> - **Large deployment (50+ hosts)**: `DB_CONNECTION_LIMIT=50` or higher +> +> Each connection pool serves one backend instance. If you have concurrent operations (multiple users, background jobs, agent checkins), increase the pool size accordingly. 
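As a rough sketch of how these settings are usually applied (the backend's actual Prisma client wiring is not part of this diff): `connection_limit`, `pool_timeout` and `connect_timeout` are documented Prisma connection-string parameters, so the pool variables above can simply be appended to `DATABASE_URL` as query parameters. `DB_IDLE_TIMEOUT` and `DB_MAX_LIFETIME` have no standard URL equivalent and are assumed to be handled elsewhere in the backend.

```js
// Hypothetical sketch only - variable names come from backend/env.example,
// but the exact place PatchMon builds its connection string is not shown here.
const poolParams = new URLSearchParams({
	connection_limit: process.env.DB_CONNECTION_LIMIT || "30",
	pool_timeout: process.env.DB_POOL_TIMEOUT || "20",
	connect_timeout: process.env.DB_CONNECT_TIMEOUT || "10",
});

// e.g. postgresql://patchmon_user:...@localhost:5432/patchmon_db?connection_limit=30&pool_timeout=20&connect_timeout=10
const databaseUrl = `${process.env.DATABASE_URL}?${poolParams.toString()}`;
```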
+ ##### Redis Configuration | Variable | Description | Default | diff --git a/docker/backend.Dockerfile b/docker/backend.Dockerfile index d4992d2..67b165e 100644 --- a/docker/backend.Dockerfile +++ b/docker/backend.Dockerfile @@ -46,8 +46,10 @@ COPY --chown=node:node backend/ ./backend/ WORKDIR /app/backend -RUN npm ci --ignore-scripts &&\ - npx prisma generate &&\ +RUN npm cache clean --force &&\ + rm -rf node_modules ~/.npm /root/.npm &&\ + npm ci --ignore-scripts --legacy-peer-deps --no-audit --prefer-online --fetch-retries=0 &&\ + PRISMA_CLI_BINARY_TYPE=binary npm run db:generate &&\ npm prune --omit=dev &&\ npm cache clean --force diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index 1568cc1..0a5b34c 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -50,6 +50,12 @@ services: SERVER_HOST: localhost SERVER_PORT: 3000 CORS_ORIGIN: http://localhost:3000 + # Database Connection Pool Configuration (Prisma) + DB_CONNECTION_LIMIT: 30 + DB_POOL_TIMEOUT: 20 + DB_CONNECT_TIMEOUT: 10 + DB_IDLE_TIMEOUT: 300 + DB_MAX_LIFETIME: 1800 # Rate Limiting (times in milliseconds) RATE_LIMIT_WINDOW_MS: 900000 RATE_LIMIT_MAX: 5000 diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 29f08e9..aed06f9 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -56,6 +56,12 @@ services: SERVER_HOST: localhost SERVER_PORT: 3000 CORS_ORIGIN: http://localhost:3000 + # Database Connection Pool Configuration (Prisma) + DB_CONNECTION_LIMIT: 30 + DB_POOL_TIMEOUT: 20 + DB_CONNECT_TIMEOUT: 10 + DB_IDLE_TIMEOUT: 300 + DB_MAX_LIFETIME: 1800 # Rate Limiting (times in milliseconds) RATE_LIMIT_WINDOW_MS: 900000 RATE_LIMIT_MAX: 5000 diff --git a/docker/frontend.Dockerfile b/docker/frontend.Dockerfile index fd02ec0..e63d714 100644 --- a/docker/frontend.Dockerfile +++ b/docker/frontend.Dockerfile @@ -17,16 +17,17 @@ CMD ["npm", "run", "dev", "--", "--host", "0.0.0.0", "--port", "3000"] # Builder stage for production FROM node:lts-alpine AS builder -WORKDIR /app +WORKDIR /app/frontend -COPY package*.json ./ -COPY frontend/package*.json ./frontend/ +COPY frontend/package*.json ./ -RUN npm ci --ignore-scripts +RUN npm cache clean --force &&\ + rm -rf node_modules ~/.npm /root/.npm &&\ + npm install --ignore-scripts --legacy-peer-deps --no-audit --prefer-online --fetch-retries=0 -COPY frontend/ ./frontend/ +COPY frontend/ ./ -RUN npm run build:frontend +RUN npm run build # Production stage FROM nginxinc/nginx-unprivileged:alpine diff --git a/frontend/env.example b/frontend/env.example index 10d606b..4f2aa12 100644 --- a/frontend/env.example +++ b/frontend/env.example @@ -6,5 +6,5 @@ VITE_API_URL=http://localhost:3001/api/v1 # Application Metadata VITE_APP_NAME=PatchMon -VITE_APP_VERSION=1.3.0 +VITE_APP_VERSION=1.3.1 diff --git a/frontend/package.json b/frontend/package.json index 0da7ae1..fc4443e 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,7 +1,7 @@ { "name": "patchmon-frontend", "private": true, - "version": "1.3.0", + "version": "1.3.1", "license": "AGPL-3.0", "type": "module", "scripts": { @@ -27,7 +27,8 @@ "react-chartjs-2": "^5.2.0", "react-dom": "^18.3.1", "react-icons": "^5.5.0", - "react-router-dom": "^6.30.1" + "react-router-dom": "^6.30.1", + "trianglify": "^4.1.1" }, "devDependencies": { "@types/react": "^18.3.14", diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index 0bc138c..bcc31af 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -7,6 +7,7 @@ import ProtectedRoute from 
"./components/ProtectedRoute"; import SettingsLayout from "./components/SettingsLayout"; import { isAuthPhase } from "./constants/authPhases"; import { AuthProvider, useAuth } from "./contexts/AuthContext"; +import { ColorThemeProvider } from "./contexts/ColorThemeContext"; import { ThemeProvider } from "./contexts/ThemeContext"; import { UpdateNotificationProvider } from "./contexts/UpdateNotificationContext"; @@ -41,6 +42,7 @@ const SettingsServerConfig = lazy( () => import("./pages/settings/SettingsServerConfig"), ); const SettingsUsers = lazy(() => import("./pages/settings/SettingsUsers")); +const SettingsMetrics = lazy(() => import("./pages/settings/SettingsMetrics")); // Loading fallback component const LoadingFallback = () => ( @@ -388,6 +390,16 @@ function AppRoutes() { } /> + + + + + + } + /> - - - - - - - + + + + + + + + + ); } diff --git a/frontend/src/components/Layout.jsx b/frontend/src/components/Layout.jsx index 8f68a16..9833914 100644 --- a/frontend/src/components/Layout.jsx +++ b/frontend/src/components/Layout.jsx @@ -26,9 +26,11 @@ import { Zap, } from "lucide-react"; import { useCallback, useEffect, useRef, useState } from "react"; -import { FaYoutube } from "react-icons/fa"; +import { FaReddit, FaYoutube } from "react-icons/fa"; import { Link, useLocation, useNavigate } from "react-router-dom"; +import trianglify from "trianglify"; import { useAuth } from "../contexts/AuthContext"; +import { useColorTheme } from "../contexts/ColorThemeContext"; import { useUpdateNotification } from "../contexts/UpdateNotificationContext"; import { dashboardAPI, versionAPI } from "../utils/api"; import DiscordIcon from "./DiscordIcon"; @@ -61,7 +63,9 @@ const Layout = ({ children }) => { canManageSettings, } = useAuth(); const { updateAvailable } = useUpdateNotification(); + const { themeConfig } = useColorTheme(); const userMenuRef = useRef(null); + const bgCanvasRef = useRef(null); // Fetch dashboard stats for the "Last updated" info const { @@ -233,27 +237,103 @@ const Layout = ({ children }) => { navigate("/hosts?action=add"); }; + // Generate Trianglify background for dark mode + useEffect(() => { + const generateBackground = () => { + if ( + bgCanvasRef.current && + themeConfig?.login && + document.documentElement.classList.contains("dark") + ) { + // Get current date as seed for daily variation + const today = new Date(); + const dateSeed = `${today.getFullYear()}-${today.getMonth()}-${today.getDate()}`; + + // Generate pattern with selected theme configuration + const pattern = trianglify({ + width: window.innerWidth, + height: window.innerHeight, + cellSize: themeConfig.login.cellSize, + variance: themeConfig.login.variance, + seed: dateSeed, + xColors: themeConfig.login.xColors, + yColors: themeConfig.login.yColors, + }); + + // Render to canvas + pattern.toCanvas(bgCanvasRef.current); + } + }; + + generateBackground(); + + // Regenerate on window resize or theme change + const handleResize = () => { + generateBackground(); + }; + + window.addEventListener("resize", handleResize); + + // Watch for dark mode changes + const observer = new MutationObserver((mutations) => { + mutations.forEach((mutation) => { + if (mutation.attributeName === "class") { + generateBackground(); + } + }); + }); + + observer.observe(document.documentElement, { + attributes: true, + attributeFilter: ["class"], + }); + + return () => { + window.removeEventListener("resize", handleResize); + observer.disconnect(); + }; + }, [themeConfig]); + // Fetch GitHub stars count const fetchGitHubStars = 
useCallback(async () => { - // Skip if already fetched recently + // Try to load cached star count first + const cachedStars = localStorage.getItem("githubStarsCount"); + if (cachedStars) { + setGithubStars(parseInt(cachedStars, 10)); + } + + // Skip API call if fetched recently const lastFetch = localStorage.getItem("githubStarsFetchTime"); const now = Date.now(); - if (lastFetch && now - parseInt(lastFetch, 15) < 600000) { - // 15 minute cache + if (lastFetch && now - parseInt(lastFetch, 10) < 600000) { + // 10 minute cache return; } try { const response = await fetch( "https://api.github.com/repos/9technologygroup/patchmon.net", + { + headers: { + Accept: "application/vnd.github.v3+json", + }, + }, ); + if (response.ok) { const data = await response.json(); setGithubStars(data.stargazers_count); + localStorage.setItem( + "githubStarsCount", + data.stargazers_count.toString(), + ); localStorage.setItem("githubStarsFetchTime", now.toString()); + } else if (response.status === 403 || response.status === 429) { + console.warn("GitHub API rate limit exceeded, using cached value"); } } catch (error) { console.error("Failed to fetch GitHub stars:", error); + // Keep using cached value if available } }, []); @@ -303,11 +383,76 @@ const Layout = ({ children }) => { fetchGitHubStars(); }, [fetchGitHubStars]); + // Set CSS custom properties for glassmorphism and theme colors in dark mode + useEffect(() => { + const updateThemeStyles = () => { + const isDark = document.documentElement.classList.contains("dark"); + const root = document.documentElement; + + if (isDark && themeConfig?.app) { + // Glass navigation bars - very light for pattern visibility + root.style.setProperty("--sidebar-bg", "rgba(0, 0, 0, 0.15)"); + root.style.setProperty("--sidebar-blur", "blur(12px)"); + root.style.setProperty("--topbar-bg", "rgba(0, 0, 0, 0.15)"); + root.style.setProperty("--topbar-blur", "blur(12px)"); + root.style.setProperty("--button-bg", "rgba(255, 255, 255, 0.15)"); + root.style.setProperty("--button-blur", "blur(8px)"); + + // Theme-colored cards and buttons - darker to stand out + root.style.setProperty("--card-bg", themeConfig.app.cardBg); + root.style.setProperty("--card-border", themeConfig.app.cardBorder); + root.style.setProperty("--card-bg-hover", themeConfig.app.bgTertiary); + root.style.setProperty("--theme-button-bg", themeConfig.app.buttonBg); + root.style.setProperty( + "--theme-button-hover", + themeConfig.app.buttonHover, + ); + } else { + // Light mode - standard colors + root.style.setProperty("--sidebar-bg", "white"); + root.style.setProperty("--sidebar-blur", "none"); + root.style.setProperty("--topbar-bg", "white"); + root.style.setProperty("--topbar-blur", "none"); + root.style.setProperty("--button-bg", "white"); + root.style.setProperty("--button-blur", "none"); + root.style.setProperty("--card-bg", "white"); + root.style.setProperty("--card-border", "#e5e7eb"); + root.style.setProperty("--card-bg-hover", "#f9fafb"); + root.style.setProperty("--theme-button-bg", "#f3f4f6"); + root.style.setProperty("--theme-button-hover", "#e5e7eb"); + } + }; + + updateThemeStyles(); + + // Watch for dark mode changes + const observer = new MutationObserver(() => { + updateThemeStyles(); + }); + + observer.observe(document.documentElement, { + attributes: true, + attributeFilter: ["class"], + }); + + return () => observer.disconnect(); + }, [themeConfig]); + return ( -
+
+ {/* Full-screen Trianglify Background (Dark Mode Only) */} + +
{/* Mobile sidebar */}
+
@@ -562,19 +740,6 @@ const Layout = ({ children }) => { )}
- {/* Collapse/Expand button on border */} -