From d1e74cce1d0b3073d08a3dcc287e08fc6432fcbc Mon Sep 17 00:00:00 2001 From: 9technologygroup Date: Tue, 30 Dec 2025 10:03:13 +0000 Subject: [PATCH 01/13] Buttons and login page edits --- backend/src/routes/socialMediaStatsRoutes.js | 27 + backend/src/server.js | 5 + backend/src/services/automation/index.js | 19 + .../services/automation/socialMediaStats.js | 710 ++++++++++++++++++ frontend/src/components/Layout.jsx | 350 ++++----- frontend/src/components/ReleaseNotesModal.jsx | 21 +- frontend/src/pages/Login.jsx | 66 +- 7 files changed, 978 insertions(+), 220 deletions(-) create mode 100644 backend/src/routes/socialMediaStatsRoutes.js create mode 100644 backend/src/services/automation/socialMediaStats.js diff --git a/backend/src/routes/socialMediaStatsRoutes.js b/backend/src/routes/socialMediaStatsRoutes.js new file mode 100644 index 0000000..3fd2d82 --- /dev/null +++ b/backend/src/routes/socialMediaStatsRoutes.js @@ -0,0 +1,27 @@ +const express = require("express"); +const { + socialMediaStatsCache, +} = require("../services/automation/socialMediaStats"); + +const router = express.Router(); + +// Get social media statistics from cache +router.get("/", async (_req, res) => { + try { + res.json({ + github_stars: socialMediaStatsCache.github_stars, + discord_members: socialMediaStatsCache.discord_members, + buymeacoffee_supporters: socialMediaStatsCache.buymeacoffee_supporters, + youtube_subscribers: socialMediaStatsCache.youtube_subscribers, + linkedin_followers: socialMediaStatsCache.linkedin_followers, + last_updated: socialMediaStatsCache.last_updated, + }); + } catch (error) { + console.error("Error fetching social media stats:", error); + res.status(500).json({ + error: "Failed to fetch social media statistics", + }); + } +}); + +module.exports = router; diff --git a/backend/src/server.js b/backend/src/server.js index c839597..735f6e6 100644 --- a/backend/src/server.js +++ b/backend/src/server.js @@ -75,6 +75,7 @@ const apiHostsRoutes = 
require("./routes/apiHostsRoutes"); const releaseNotesRoutes = require("./routes/releaseNotesRoutes"); const releaseNotesAcceptanceRoutes = require("./routes/releaseNotesAcceptanceRoutes"); const buyMeACoffeeRoutes = require("./routes/buyMeACoffeeRoutes"); +const socialMediaStatsRoutes = require("./routes/socialMediaStatsRoutes"); const { initSettings } = require("./services/settingsService"); const { queueManager } = require("./services/automation"); const { authenticateToken, requireAdmin } = require("./middleware/auth"); @@ -491,6 +492,7 @@ app.use( releaseNotesAcceptanceRoutes, ); app.use(`/api/${apiVersion}/buy-me-a-coffee`, buyMeACoffeeRoutes); +app.use(`/api/${apiVersion}/social-media-stats`, socialMediaStatsRoutes); // Bull Board - will be populated after queue manager initializes let bullBoardRouter = null; @@ -900,6 +902,9 @@ async function startServer() { // Schedule recurring jobs await queueManager.scheduleAllJobs(); + // Trigger social media stats collection on boot + await queueManager.triggerSocialMediaStats(); + // Set up Bull Board for queue monitoring const serverAdapter = new ExpressAdapter(); // Set basePath to match where we mount the router diff --git a/backend/src/services/automation/index.js b/backend/src/services/automation/index.js index dc76b15..b835608 100644 --- a/backend/src/services/automation/index.js +++ b/backend/src/services/automation/index.js @@ -14,6 +14,7 @@ const DockerInventoryCleanup = require("./dockerInventoryCleanup"); const DockerImageUpdateCheck = require("./dockerImageUpdateCheck"); const MetricsReporting = require("./metricsReporting"); const SystemStatistics = require("./systemStatistics"); +const SocialMediaStats = require("./socialMediaStats"); // Queue names const QUEUE_NAMES = { @@ -25,6 +26,7 @@ const QUEUE_NAMES = { DOCKER_IMAGE_UPDATE_CHECK: "docker-image-update-check", METRICS_REPORTING: "metrics-reporting", SYSTEM_STATISTICS: "system-statistics", + SOCIAL_MEDIA_STATS: "social-media-stats", AGENT_COMMANDS: 
"agent-commands", }; @@ -111,6 +113,9 @@ class QueueManager { this.automations[QUEUE_NAMES.SYSTEM_STATISTICS] = new SystemStatistics( this, ); + this.automations[QUEUE_NAMES.SOCIAL_MEDIA_STATS] = new SocialMediaStats( + this, + ); console.log("✅ All automation classes initialized"); } @@ -205,6 +210,15 @@ class QueueManager { workerOptions, ); + // Social Media Stats Worker + this.workers[QUEUE_NAMES.SOCIAL_MEDIA_STATS] = new Worker( + QUEUE_NAMES.SOCIAL_MEDIA_STATS, + this.automations[QUEUE_NAMES.SOCIAL_MEDIA_STATS].process.bind( + this.automations[QUEUE_NAMES.SOCIAL_MEDIA_STATS], + ), + workerOptions, + ); + // Agent Commands Worker this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker( QUEUE_NAMES.AGENT_COMMANDS, @@ -372,6 +386,7 @@ class QueueManager { await this.automations[QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK].schedule(); await this.automations[QUEUE_NAMES.METRICS_REPORTING].schedule(); await this.automations[QUEUE_NAMES.SYSTEM_STATISTICS].schedule(); + await this.automations[QUEUE_NAMES.SOCIAL_MEDIA_STATS].schedule(); } /** @@ -415,6 +430,10 @@ class QueueManager { return this.automations[QUEUE_NAMES.METRICS_REPORTING].triggerManual(); } + async triggerSocialMediaStats() { + return this.automations[QUEUE_NAMES.SOCIAL_MEDIA_STATS].triggerManual(); + } + /** * Get queue statistics */ diff --git a/backend/src/services/automation/socialMediaStats.js b/backend/src/services/automation/socialMediaStats.js new file mode 100644 index 0000000..236c1f9 --- /dev/null +++ b/backend/src/services/automation/socialMediaStats.js @@ -0,0 +1,710 @@ +const axios = require("axios"); + +// In-memory cache for social media statistics +const socialMediaStatsCache = { + github_stars: null, + discord_members: null, + buymeacoffee_supporters: null, + youtube_subscribers: null, + linkedin_followers: null, + last_updated: null, +}; + +/** + * Helper function to parse subscriber/follower count from text with K/M/B suffixes + */ +function parseCount(text) { + if (!text) return null; + + 
// Remove commas and extract numbers + const cleanText = text.replace(/,/g, "").trim(); + + // Match patterns like "1.2K", "1.2M", "123K", etc. + const match = cleanText.match(/([\d.]+)\s*([KMB])?/i); + if (match) { + let count = parseFloat(match[1]); + const suffix = match[2]?.toUpperCase(); + + if (suffix === "K") { + count *= 1000; + } else if (suffix === "M") { + count *= 1000000; + } else if (suffix === "B") { + count *= 1000000000; + } + + return Math.floor(count); + } + + // Try to find just numbers + const numbers = cleanText.match(/\d+/); + if (numbers) { + return parseInt(numbers[0], 10); + } + + return null; +} + +/** + * Scrape Buy Me a Coffee supporter count + * Extracted from buyMeACoffeeRoutes.js + */ +async function scrapeBuyMeACoffee() { + try { + const response = await axios.get("https://buymeacoffee.com/iby___", { + headers: { + "User-Agent": + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36", + }, + timeout: 10000, + }); + + const html = response.data; + let supporterCount = null; + + // Pattern 1: Look for "X supporters" text + const textPatterns = [ + /(\d+)\s+supporters?/i, + /(\d+)\s+people\s+(have\s+)?(bought|supported)/i, + /supporter[^>]*>.*?(\d+)/i, + /(\d+)[^<]*supporter/i, + />(\d+)<[^>]*supporter/i, + /supporter[^<]*<[^>]*>(\d+)/i, + ]; + + for (const pattern of textPatterns) { + const match = html.match(pattern); + if (match?.[1]) { + const count = parseInt(match[1], 10); + if (count > 0 && count < 1000000) { + supporterCount = count; + break; + } + } + } + + // Pattern 2: Look for data attributes + if (!supporterCount) { + const dataPatterns = [ + /data-supporters?=["'](\d+)["']/i, + /data-count=["'](\d+)["']/i, + /supporter[^>]*data-[^=]*=["'](\d+)["']/i, + ]; + + for (const pattern of dataPatterns) { + const match = html.match(pattern); + if (match?.[1]) { + const count = parseInt(match[1], 10); + if (count > 0 && count < 1000000) { + supporterCount = count; + break; + } + } + } + } + + // Pattern 3: Look for JSON-LD 
structured data + if (!supporterCount) { + const jsonLdMatches = html.matchAll( + /]*type=["']application\/ld\+json["'][^>]*>(.*?)<\/script>/gis, + ); + for (const jsonLdMatch of jsonLdMatches) { + try { + const jsonLd = JSON.parse(jsonLdMatch[1]); + const findCount = (obj) => { + if (typeof obj !== "object" || obj === null) return null; + if (obj.supporterCount || obj.supporter_count || obj.supporters) { + return parseInt( + obj.supporterCount || obj.supporter_count || obj.supporters, + 10, + ); + } + for (const value of Object.values(obj)) { + if (typeof value === "object") { + const found = findCount(value); + if (found) return found; + } + } + return null; + }; + const count = findCount(jsonLd); + if (count && count > 0 && count < 1000000) { + supporterCount = count; + break; + } + } catch (_e) { + // Ignore JSON parse errors + } + } + } + + // Pattern 4: Look for class names or IDs + if (!supporterCount) { + const classPatterns = [ + /class="[^"]*supporter[^"]*"[^>]*>.*?(\d+)/i, + /id="[^"]*supporter[^"]*"[^>]*>.*?(\d+)/i, + /]*class="[^"]*count[^"]*"[^>]*>(\d+)<\/span>/i, + ]; + + for (const pattern of classPatterns) { + const match = html.match(pattern); + if (match?.[1]) { + const count = parseInt(match[1], 10); + if (count > 0 && count < 1000000) { + supporterCount = count; + break; + } + } + } + } + + // Pattern 5: Look for numbers near supporter-related text + if (!supporterCount) { + const numberMatches = html.matchAll(/\b(\d{1,6})\b/g); + for (const match of numberMatches) { + const num = parseInt(match[1], 10); + if (num > 0 && num < 1000000) { + const start = Math.max(0, match.index - 200); + const end = Math.min( + html.length, + match.index + match[0].length + 200, + ); + const context = html.substring(start, end).toLowerCase(); + if ( + context.includes("supporter") || + context.includes("coffee") || + context.includes("donation") + ) { + supporterCount = num; + break; + } + } + } + } + + return supporterCount; + } catch (error) { + 
console.error("Error scraping Buy Me a Coffee:", error.message); + return null; + } +} + +/** + * Scrape Discord member count from invitation page + */ +async function scrapeDiscord() { + try { + const response = await axios.get("https://patchmon.net/discord", { + headers: { + "User-Agent": + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36", + }, + timeout: 10000, + maxRedirects: 5, + }); + + const html = response.data; + let memberCount = null; + + // Pattern 1: Look for "X members" or "X online" text + const textPatterns = [ + /(\d+)\s+members?/i, + /(\d+)\s+online/i, + /member[^>]*>.*?(\d+)/i, + /(\d+)[^<]*member/i, + />(\d+)<[^>]*member/i, + ]; + + for (const pattern of textPatterns) { + const match = html.match(pattern); + if (match?.[1]) { + const count = parseInt(match[1], 10); + if (count > 0 && count < 10000000) { + memberCount = count; + break; + } + } + } + + // Pattern 2: Look for data attributes + if (!memberCount) { + const dataPatterns = [ + /data-members?=["'](\d+)["']/i, + /data-count=["'](\d+)["']/i, + /member[^>]*data-[^=]*=["'](\d+)["']/i, + ]; + + for (const pattern of dataPatterns) { + const match = html.match(pattern); + if (match?.[1]) { + const count = parseInt(match[1], 10); + if (count > 0 && count < 10000000) { + memberCount = count; + break; + } + } + } + } + + // Pattern 3: Look for JSON-LD or meta tags + if (!memberCount) { + const jsonLdMatches = html.matchAll( + /]*type=["']application\/ld\+json["'][^>]*>(.*?)<\/script>/gis, + ); + for (const jsonLdMatch of jsonLdMatches) { + try { + const jsonLd = JSON.parse(jsonLdMatch[1]); + const findCount = (obj) => { + if (typeof obj !== "object" || obj === null) return null; + if (obj.memberCount || obj.member_count || obj.members) { + return parseInt( + obj.memberCount || obj.member_count || obj.members, + 10, + ); + } + for (const value of Object.values(obj)) { + if (typeof value === "object") { + const found = 
findCount(value); + if (found) return found; + } + } + return null; + }; + const count = findCount(jsonLd); + if (count && count > 0 && count < 10000000) { + memberCount = count; + break; + } + } catch (_e) { + // Ignore JSON parse errors + } + } + } + + // Pattern 4: Look for numbers near member-related text + if (!memberCount) { + const numberMatches = html.matchAll(/\b(\d{1,7})\b/g); + for (const match of numberMatches) { + const num = parseInt(match[1], 10); + if (num > 0 && num < 10000000) { + const start = Math.max(0, match.index - 200); + const end = Math.min( + html.length, + match.index + match[0].length + 200, + ); + const context = html.substring(start, end).toLowerCase(); + if ( + context.includes("member") || + context.includes("discord") || + context.includes("online") + ) { + memberCount = num; + break; + } + } + } + } + + return memberCount; + } catch (error) { + console.error("Error scraping Discord:", error.message); + return null; + } +} + +/** + * Scrape YouTube subscriber count + */ +async function scrapeYouTube() { + try { + const response = await axios.get("https://www.youtube.com/@patchmonTV", { + headers: { + "User-Agent": + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36", + }, + timeout: 10000, + }); + + const html = response.data; + let subscriberCount = null; + + // Pattern 1: Look for ytInitialData JavaScript variable + const ytInitialDataMatch = html.match(/var ytInitialData = ({.+?});/); + if (ytInitialDataMatch) { + try { + const ytData = JSON.parse(ytInitialDataMatch[1]); + // Navigate through the nested structure to find subscriber count + const findSubscriberCount = (obj, depth = 0) => { + if (depth > 10) return null; + if (typeof obj !== "object" || obj === null) return null; + + // Check for subscriber count in various possible locations + if (obj.subscriberCount || obj.subscriber_count || obj.subscribers) { + const count = parseCount( + String( + 
obj.subscriberCount || obj.subscriber_count || obj.subscribers, + ), + ); + if (count && count > 0) return count; + } + + // Check for subscriber text + if (typeof obj === "string" && obj.includes("subscriber")) { + const count = parseCount(obj); + if (count && count > 0) return count; + } + + // Recursively search + if (Array.isArray(obj)) { + for (const item of obj) { + const found = findSubscriberCount(item, depth + 1); + if (found) return found; + } + } else { + for (const value of Object.values(obj)) { + const found = findSubscriberCount(value, depth + 1); + if (found) return found; + } + } + + return null; + }; + + subscriberCount = findSubscriberCount(ytData); + } catch (_e) { + // Ignore JSON parse errors + } + } + + // Pattern 2: Look for subscriber count in HTML text + if (!subscriberCount) { + const subscriberPatterns = [ + /(\d+(?:\.\d+)?[KMB]?)\s+subscribers?/i, + /subscribers?[^>]*>.*?(\d+(?:\.\d+)?[KMB]?)/i, + /(\d+(?:\.\d+)?[KMB]?)[^<]*subscriber/i, + ]; + + for (const pattern of subscriberPatterns) { + const match = html.match(pattern); + if (match?.[1]) { + const count = parseCount(match[1]); + if (count && count > 0) { + subscriberCount = count; + break; + } + } + } + } + + // Pattern 3: Look for numbers near subscriber-related text + if (!subscriberCount) { + const numberMatches = html.matchAll(/(\d+(?:\.\d+)?[KMB]?)/gi); + for (const match of numberMatches) { + const start = Math.max(0, match.index - 100); + const end = Math.min(html.length, match.index + match[0].length + 100); + const context = html.substring(start, end).toLowerCase(); + if (context.includes("subscriber")) { + const count = parseCount(match[1]); + if (count && count > 0) { + subscriberCount = count; + break; + } + } + } + } + + return subscriberCount; + } catch (error) { + console.error("Error scraping YouTube:", error.message); + return null; + } +} + +/** + * Scrape LinkedIn follower count + */ +async function scrapeLinkedIn() { + try { + const response = await 
axios.get("https://linkedin.com/company/patchmon", { + headers: { + "User-Agent": + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36", + }, + timeout: 10000, + }); + + const html = response.data; + let followerCount = null; + + // Pattern 1: Look for follower count in text + const textPatterns = [ + /(\d+(?:\.\d+)?[KMB]?)\s+followers?/i, + /followers?[^>]*>.*?(\d+(?:\.\d+)?[KMB]?)/i, + /(\d+(?:\.\d+)?[KMB]?)[^<]*follower/i, + ]; + + for (const pattern of textPatterns) { + const match = html.match(pattern); + if (match?.[1]) { + const count = parseCount(match[1]); + if (count && count > 0) { + followerCount = count; + break; + } + } + } + + // Pattern 2: Look for data attributes + if (!followerCount) { + const dataPatterns = [ + /data-followers?=["'](\d+(?:\.\d+)?[KMB]?)["']/i, + /data-count=["'](\d+(?:\.\d+)?[KMB]?)["']/i, + /follower[^>]*data-[^=]*=["'](\d+(?:\.\d+)?[KMB]?)["']/i, + ]; + + for (const pattern of dataPatterns) { + const match = html.match(pattern); + if (match?.[1]) { + const count = parseCount(match[1]); + if (count && count > 0) { + followerCount = count; + break; + } + } + } + } + + // Pattern 3: Look for JSON-LD structured data + if (!followerCount) { + const jsonLdMatches = html.matchAll( + /]*type=["']application\/ld\+json["'][^>]*>(.*?)<\/script>/gis, + ); + for (const jsonLdMatch of jsonLdMatches) { + try { + const jsonLd = JSON.parse(jsonLdMatch[1]); + const findCount = (obj) => { + if (typeof obj !== "object" || obj === null) return null; + if (obj.followerCount || obj.follower_count || obj.followers) { + return parseCount( + String( + obj.followerCount || obj.follower_count || obj.followers, + ), + ); + } + for (const value of Object.values(obj)) { + if (typeof value === "object") { + const found = findCount(value); + if (found) return found; + } + } + return null; + }; + const count = findCount(jsonLd); + if (count && count > 0) { + followerCount = count; + break; + } + 
} catch (_e) { + // Ignore JSON parse errors + } + } + } + + // Pattern 4: Look for numbers near follower-related text + if (!followerCount) { + const numberMatches = html.matchAll(/(\d+(?:\.\d+)?[KMB]?)/gi); + for (const match of numberMatches) { + const start = Math.max(0, match.index - 100); + const end = Math.min(html.length, match.index + match[0].length + 100); + const context = html.substring(start, end).toLowerCase(); + if (context.includes("follower")) { + const count = parseCount(match[1]); + if (count && count > 0) { + followerCount = count; + break; + } + } + } + } + + return followerCount; + } catch (error) { + console.error("Error scraping LinkedIn:", error.message); + return null; + } +} + +/** + * Social Media Stats Collection Automation + * Fetches social media statistics and stores them in memory cache + */ +class SocialMediaStats { + constructor(queueManager) { + this.queueManager = queueManager; + this.queueName = "social-media-stats"; + } + + /** + * Process social media stats collection job + */ + async process(_job) { + const startTime = Date.now(); + console.log("📊 Starting social media stats collection..."); + + const results = { + github_stars: null, + discord_members: null, + buymeacoffee_supporters: null, + youtube_subscribers: null, + linkedin_followers: null, + }; + + try { + // Fetch GitHub stars + try { + const response = await axios.get( + "https://api.github.com/repos/PatchMon/PatchMon", + { + headers: { + Accept: "application/vnd.github.v3+json", + }, + timeout: 10000, + }, + ); + + if (response.data?.stargazers_count) { + results.github_stars = response.data.stargazers_count; + console.log(`✅ GitHub stars: ${results.github_stars}`); + } + } catch (error) { + console.error("Error fetching GitHub stars:", error.message); + } + + // Scrape Discord members + try { + const discordCount = await scrapeDiscord(); + if (discordCount !== null) { + results.discord_members = discordCount; + console.log(`✅ Discord members: 
${results.discord_members}`); + } + } catch (error) { + console.error("Error scraping Discord:", error.message); + } + + // Scrape Buy Me a Coffee supporters + try { + const bmcCount = await scrapeBuyMeACoffee(); + if (bmcCount !== null) { + results.buymeacoffee_supporters = bmcCount; + console.log( + `✅ Buy Me a Coffee supporters: ${results.buymeacoffee_supporters}`, + ); + } + } catch (error) { + console.error("Error scraping Buy Me a Coffee:", error.message); + } + + // Scrape YouTube subscribers + try { + const youtubeCount = await scrapeYouTube(); + if (youtubeCount !== null) { + results.youtube_subscribers = youtubeCount; + console.log(`✅ YouTube subscribers: ${results.youtube_subscribers}`); + } + } catch (error) { + console.error("Error scraping YouTube:", error.message); + } + + // Scrape LinkedIn followers + try { + const linkedinCount = await scrapeLinkedIn(); + if (linkedinCount !== null) { + results.linkedin_followers = linkedinCount; + console.log(`✅ LinkedIn followers: ${results.linkedin_followers}`); + } + } catch (error) { + console.error("Error scraping LinkedIn:", error.message); + } + + // Update cache - only update fields that successfully fetched + // Preserve existing values if fetch failed + if (results.github_stars !== null) { + socialMediaStatsCache.github_stars = results.github_stars; + } + if (results.discord_members !== null) { + socialMediaStatsCache.discord_members = results.discord_members; + } + if (results.buymeacoffee_supporters !== null) { + socialMediaStatsCache.buymeacoffee_supporters = + results.buymeacoffee_supporters; + } + if (results.youtube_subscribers !== null) { + socialMediaStatsCache.youtube_subscribers = results.youtube_subscribers; + } + if (results.linkedin_followers !== null) { + socialMediaStatsCache.linkedin_followers = results.linkedin_followers; + } + + // Update last_updated timestamp if at least one stat was fetched + if ( + results.github_stars !== null || + results.discord_members !== null || + 
results.buymeacoffee_supporters !== null || + results.youtube_subscribers !== null || + results.linkedin_followers !== null + ) { + socialMediaStatsCache.last_updated = new Date(); + } + + const executionTime = Date.now() - startTime; + console.log( + `✅ Social media stats collection completed in ${executionTime}ms`, + ); + + return { + success: true, + ...results, + executionTime, + }; + } catch (error) { + const executionTime = Date.now() - startTime; + console.error( + `❌ Social media stats collection failed after ${executionTime}ms:`, + error.message, + ); + throw error; + } + } + + /** + * Schedule recurring social media stats collection (daily at midnight) + */ + async schedule() { + const job = await this.queueManager.queues[this.queueName].add( + "social-media-stats", + {}, + { + repeat: { cron: "0 0 * * *" }, // Daily at midnight + jobId: "social-media-stats-recurring", + }, + ); + console.log("✅ Social media stats collection scheduled"); + return job; + } + + /** + * Trigger manual social media stats collection + */ + async triggerManual() { + const job = await this.queueManager.queues[this.queueName].add( + "social-media-stats-manual", + {}, + { priority: 1 }, + ); + console.log("✅ Manual social media stats collection triggered"); + return job; + } +} + +module.exports = SocialMediaStats; +module.exports.socialMediaStatsCache = socialMediaStatsCache; diff --git a/frontend/src/components/Layout.jsx b/frontend/src/components/Layout.jsx index 3e10e5f..590dd53 100644 --- a/frontend/src/components/Layout.jsx +++ b/frontend/src/components/Layout.jsx @@ -26,7 +26,7 @@ import { Zap, } from "lucide-react"; import { useCallback, useEffect, useRef, useState } from "react"; -import { FaReddit, FaYoutube } from "react-icons/fa"; +import { FaLinkedin, FaYoutube } from "react-icons/fa"; import { Link, useLocation, useNavigate } from "react-router-dom"; import { useAuth } from "../contexts/AuthContext"; import { useColorTheme } from "../contexts/ColorThemeContext"; @@ 
-39,6 +39,15 @@ import ReleaseNotesModal from "./ReleaseNotesModal"; import UpgradeNotificationIcon from "./UpgradeNotificationIcon"; const Layout = ({ children }) => { + // Helper function to format numbers in k format (e.g., 1663 -> 1.7k) + const formatNumber = (num) => { + if (num >= 1000) { + const rounded = Math.ceil((num / 1000) * 10) / 10; // Round up to 1 decimal place + return `${rounded.toFixed(1)}k`; + } + return num.toString(); + }; + const [sidebarOpen, setSidebarOpen] = useState(false); const [sidebarCollapsed, setSidebarCollapsed] = useState(() => { // Load sidebar state from localStorage, default to false @@ -46,7 +55,13 @@ const Layout = ({ children }) => { return saved ? JSON.parse(saved) : false; }); const [_userMenuOpen, setUserMenuOpen] = useState(false); - const [githubStars, setGithubStars] = useState(null); + const [socialMediaStats, setSocialMediaStats] = useState({ + github_stars: null, + discord_members: null, + buymeacoffee_supporters: null, + youtube_subscribers: null, + linkedin_followers: null, + }); const [mobileLinksOpen, setMobileLinksOpen] = useState(false); const [showReleaseNotes, setShowReleaseNotes] = useState(false); const location = useLocation(); @@ -388,46 +403,22 @@ const Layout = ({ children }) => { }; }, [themeConfig]); - // Fetch GitHub stars count - const fetchGitHubStars = useCallback(async () => { - // Try to load cached star count first - const cachedStars = localStorage.getItem("githubStarsCount"); - if (cachedStars) { - setGithubStars(parseInt(cachedStars, 10)); - } - - // Skip API call if fetched recently - const lastFetch = localStorage.getItem("githubStarsFetchTime"); - const now = Date.now(); - if (lastFetch && now - parseInt(lastFetch, 10) < 600000) { - // 10 minute cache - return; - } - + // Fetch social media stats from cache + const fetchSocialMediaStats = useCallback(async () => { try { - const response = await fetch( - "https://api.github.com/repos/9technologygroup/patchmon.net", - { - headers: { - 
Accept: "application/vnd.github.v3+json", - }, - }, - ); - + const response = await fetch("/api/v1/social-media-stats"); if (response.ok) { const data = await response.json(); - setGithubStars(data.stargazers_count); - localStorage.setItem( - "githubStarsCount", - data.stargazers_count.toString(), - ); - localStorage.setItem("githubStarsFetchTime", now.toString()); - } else if (response.status === 403 || response.status === 429) { - console.warn("GitHub API rate limit exceeded, using cached value"); + setSocialMediaStats({ + github_stars: data.github_stars, + discord_members: data.discord_members, + buymeacoffee_supporters: data.buymeacoffee_supporters, + youtube_subscribers: data.youtube_subscribers, + linkedin_followers: data.linkedin_followers, + }); } } catch (error) { - console.error("Failed to fetch GitHub stars:", error); - // Keep using cached value if available + console.error("Failed to fetch social media stats:", error); } }, []); @@ -472,10 +463,10 @@ const Layout = ({ children }) => { }; }, []); - // Fetch GitHub stars on component mount + // Fetch social media stats on component mount useEffect(() => { - fetchGitHubStars(); - }, [fetchGitHubStars]); + fetchSocialMediaStats(); + }, [fetchSocialMediaStats]); // Set CSS custom properties for glassmorphism and theme colors in dark mode useEffect(() => { @@ -1055,6 +1046,72 @@ const Layout = ({ children }) => { })} )} + + {/* External Links Section - Roadmap, Documentation, Email, Website */} + {/* Profile Section - Bottom of Sidebar */} @@ -1288,10 +1345,12 @@ const Layout = ({ children }) => { GitHub - {githubStars !== null && ( + {socialMediaStats.github_stars !== null && (
- {githubStars} + + {formatNumber(socialMediaStats.github_stars)} +
)} @@ -1314,32 +1373,14 @@ const Layout = ({ children }) => { Buy Me a Coffee - - {/* Roadmap */} - setMobileLinksOpen(false)} - > - - - Roadmap - - - {/* Docs */} - setMobileLinksOpen(false)} - > - - - Documentation - + {socialMediaStats.buymeacoffee_supporters !== + null && ( +
+ + {socialMediaStats.buymeacoffee_supporters} + +
+ )}
{/* Discord */} { className="flex items-center gap-3 px-3 py-3 bg-gray-50 dark:bg-gray-800 text-secondary-600 dark:text-secondary-300 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg transition-colors min-h-[44px]" onClick={() => setMobileLinksOpen(false)} > - + Discord + {socialMediaStats.discord_members !== null && ( +
+ + {socialMediaStats.discord_members} + +
+ )}
- {/* Email */} + {/* LinkedIn */} setMobileLinksOpen(false)} > - + - Email Support + LinkedIn + {socialMediaStats.linkedin_followers !== null && ( +
+ + {socialMediaStats.linkedin_followers} + +
+ )}
{/* YouTube */} { className="flex items-center gap-3 px-3 py-3 bg-gray-50 dark:bg-gray-800 text-secondary-600 dark:text-secondary-300 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg transition-colors min-h-[44px]" onClick={() => setMobileLinksOpen(false)} > - + YouTube - - {/* Reddit */} - setMobileLinksOpen(false)} - > - - - Reddit - - - {/* Web */} - setMobileLinksOpen(false)} - > - - - Website - + {socialMediaStats.youtube_subscribers !== null && ( +
+ + {socialMediaStats.youtube_subscribers} + +
+ )}
@@ -1427,10 +1465,12 @@ const Layout = ({ children }) => { aria-label="GitHub" > - {githubStars !== null && ( + {socialMediaStats.github_stars !== null && (
- {githubStars} + + {formatNumber(socialMediaStats.github_stars)} +
)} @@ -1439,7 +1479,7 @@ const Layout = ({ children }) => { href="https://buymeacoffee.com/iby___" target="_blank" rel="noopener noreferrer" - className="flex items-center justify-center w-10 h-10 bg-gray-50 dark:bg-transparent text-secondary-600 dark:text-secondary-300 hover:bg-gray-100 dark:hover:bg-white/10 rounded-lg transition-colors shadow-sm" + className="flex items-center justify-center gap-1.5 w-auto px-2.5 h-10 bg-gray-50 dark:bg-transparent text-secondary-600 dark:text-secondary-300 hover:bg-gray-100 dark:hover:bg-white/10 rounded-lg transition-colors shadow-sm" style={{ backgroundColor: "var(--button-bg, rgb(249, 250, 251))", backdropFilter: "var(--button-blur, none)", @@ -1456,43 +1496,18 @@ const Layout = ({ children }) => { Buy Me a Coffee + {socialMediaStats.buymeacoffee_supporters !== null && ( + + {socialMediaStats.buymeacoffee_supporters} + + )} - {/* 3) Roadmap */} - - - - {/* 4) Docs */} - - - - {/* 5) Discord */} + {/* 3) Discord */} { }} title="Discord" > - + + {socialMediaStats.discord_members !== null && ( + + {socialMediaStats.discord_members} + + )} - {/* 6) Email */} + {/* 4) LinkedIn */} - + + {socialMediaStats.linkedin_followers !== null && ( + + {socialMediaStats.linkedin_followers} + + )} - {/* 7) YouTube */} + {/* 5) YouTube */} { }} title="YouTube Channel" > - - - {/* 8) Reddit */} - - - - {/* 9) Web */} - - + + {socialMediaStats.youtube_subscribers !== null && ( + + {socialMediaStats.youtube_subscribers} + + )} diff --git a/frontend/src/components/ReleaseNotesModal.jsx b/frontend/src/components/ReleaseNotesModal.jsx index bf5f90c..52f4d74 100644 --- a/frontend/src/components/ReleaseNotesModal.jsx +++ b/frontend/src/components/ReleaseNotesModal.jsx @@ -35,15 +35,11 @@ const ReleaseNotesModal = ({ isOpen, onAccept }) => { enabled: isOpen && !!versionInfo?.version, }); - // Fetch supporter count from Buy Me a Coffee - const { data: supporterData, isLoading: isLoadingSupporters } = useQuery({ - queryKey: ["buyMeACoffeeSupporters"], + 
// Fetch supporter count from social media stats + const { data: socialMediaStats, isLoading: isLoadingSupporters } = useQuery({ + queryKey: ["socialMediaStats"], queryFn: async () => { - const response = await fetch("/api/v1/buy-me-a-coffee/supporter-count", { - headers: { - Authorization: `Bearer ${localStorage.getItem("token")}`, - }, - }); + const response = await fetch("/api/v1/social-media-stats"); if (!response.ok) return null; return response.json(); }, @@ -322,12 +318,15 @@ const ReleaseNotesModal = ({ isOpen, onAccept }) => { {/* Supporter/Member count - only show for self-hosted */} {!isCloudVersion && !isLoadingSupporters && - supporterData?.count !== undefined && ( + socialMediaStats?.buymeacoffee_supporters !== undefined && + socialMediaStats?.buymeacoffee_supporters !== null && (

- {supporterData.count} + {socialMediaStats.buymeacoffee_supporters} {" "} - {supporterData.count === 1 ? "person has" : "people have"}{" "} + {socialMediaStats.buymeacoffee_supporters === 1 + ? "person has" + : "people have"}{" "} bought me a coffee so far!

)} diff --git a/frontend/src/pages/Login.jsx b/frontend/src/pages/Login.jsx index 7517382..f8ea6ca 100644 --- a/frontend/src/pages/Login.jsx +++ b/frontend/src/pages/Login.jsx @@ -14,7 +14,7 @@ import { } from "lucide-react"; import { useEffect, useId, useRef, useState } from "react"; -import { FaReddit, FaYoutube } from "react-icons/fa"; +import { FaLinkedin, FaYoutube } from "react-icons/fa"; import { useNavigate } from "react-router-dom"; import DiscordIcon from "../components/DiscordIcon"; @@ -52,7 +52,13 @@ const Login = () => { const [showGithubVersionOnLogin, setShowGithubVersionOnLogin] = useState(true); const [latestRelease, setLatestRelease] = useState(null); - const [githubStars, setGithubStars] = useState(null); + const [socialMediaStats, setSocialMediaStats] = useState({ + github_stars: null, + discord_members: null, + buymeacoffee_supporters: null, + youtube_subscribers: null, + linkedin_followers: null, + }); const canvasRef = useRef(null); const { themeConfig } = useColorTheme(); @@ -175,27 +181,36 @@ const Login = () => { checkLoginSettings(); }, []); - // Fetch latest release and stars from GitHub + // Fetch latest release and social media stats useEffect(() => { // Only fetch if the setting allows it if (!showGithubVersionOnLogin) { return; } - const fetchGitHubData = async () => { + const fetchData = async () => { try { - // Try to get cached data first + // Try to get cached release data first const cachedRelease = localStorage.getItem("githubLatestRelease"); - const cachedStars = localStorage.getItem("githubStarsCount"); const cacheTime = localStorage.getItem("githubReleaseCacheTime"); const now = Date.now(); - // Load cached data immediately + // Load cached release data immediately if (cachedRelease) { setLatestRelease(JSON.parse(cachedRelease)); } - if (cachedStars) { - setGithubStars(parseInt(cachedStars, 10)); + + // Fetch social media stats from cache + const statsResponse = await fetch("/api/v1/social-media-stats"); + if 
(statsResponse.ok) { + const statsData = await statsResponse.json(); + setSocialMediaStats({ + github_stars: statsData.github_stars, + discord_members: statsData.discord_members, + buymeacoffee_supporters: statsData.buymeacoffee_supporters, + youtube_subscribers: statsData.youtube_subscribers, + linkedin_followers: statsData.linkedin_followers, + }); } // Use cache if less than 1 hour old @@ -203,25 +218,6 @@ const Login = () => { return; } - // Fetch repository info (includes star count) - const repoResponse = await fetch( - "https://api.github.com/repos/PatchMon/PatchMon", - { - headers: { - Accept: "application/vnd.github.v3+json", - }, - }, - ); - - if (repoResponse.ok) { - const repoData = await repoResponse.json(); - setGithubStars(repoData.stargazers_count); - localStorage.setItem( - "githubStarsCount", - repoData.stargazers_count.toString(), - ); - } - // Fetch latest release const releaseResponse = await fetch( "https://api.github.com/repos/PatchMon/PatchMon/releases/latest", @@ -271,7 +267,7 @@ const Login = () => { } }; - fetchGitHubData(); + fetchData(); }, [showGithubVersionOnLogin]); // Run once on mount const handleSubmit = async (e) => { @@ -572,11 +568,11 @@ const Login = () => { title="GitHub Repository" > - {githubStars !== null && ( + {socialMediaStats.github_stars !== null && (
- {githubStars} + {socialMediaStats.github_stars}
)} @@ -632,18 +628,18 @@ const Login = () => { className="flex items-center justify-center w-10 h-10 bg-white/10 hover:bg-white/20 backdrop-blur-sm rounded-lg transition-colors border border-white/10" title="YouTube Channel" > - + {/* Reddit */} - + {/* Website */} From 33d90d15886b9cb555af8915913e84c0a55320d0 Mon Sep 17 00:00:00 2001 From: Muhammad Ibrahim Date: Sat, 3 Jan 2026 19:47:20 +0000 Subject: [PATCH 02/13] Fix: Save release notes acceptance when closing personal note modal --- frontend/src/components/ReleaseNotesModal.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/ReleaseNotesModal.jsx b/frontend/src/components/ReleaseNotesModal.jsx index 52f4d74..bdd111e 100644 --- a/frontend/src/components/ReleaseNotesModal.jsx +++ b/frontend/src/components/ReleaseNotesModal.jsx @@ -169,7 +169,7 @@ const ReleaseNotesModal = ({ isOpen, onAccept }) => { {currentStep === 2 && ( + + {pkg.description && ( + + )} + ); case "packageHosts": { // Show total number of hosts where this package is installed @@ -526,17 +546,17 @@ const Packages = () => {

Packages

-

+

Manage package updates and security patches

- - - {/* Description (only rendered if a description exists) */} - {pkg.description && ( -
-
- {pkg.description} -
-
- )} + + {pkg.description && ( + + )} + {/* Status and Hosts on same line */}
@@ -961,6 +990,48 @@ const Packages = () => { onReset={resetColumns} /> )} + + {/* Description Modal */} + {descriptionModal && ( +
+ +
+
+
+

+ {descriptionModal.description} +

+
+
+ +
+ + + )} ); }; From 01693d7e3491f3dc8bc67839de34008efcb51957 Mon Sep 17 00:00:00 2001 From: Muhammad Ibrahim Date: Sun, 4 Jan 2026 15:01:55 +0000 Subject: [PATCH 04/13] Added ability to edit autoenrolment tokens once created --- backend/src/routes/autoEnrollmentRoutes.js | 38 ++++++- frontend/src/pages/settings/Integrations.jsx | 112 ++++++++++++++----- 2 files changed, 124 insertions(+), 26 deletions(-) diff --git a/backend/src/routes/autoEnrollmentRoutes.js b/backend/src/routes/autoEnrollmentRoutes.js index 093aa2c..8865179 100644 --- a/backend/src/routes/autoEnrollmentRoutes.js +++ b/backend/src/routes/autoEnrollmentRoutes.js @@ -344,9 +344,16 @@ router.patch( authenticateToken, requireManageSettings, [ + body("token_name") + .optional() + .isLength({ min: 1, max: 255 }) + .withMessage("Token name must be between 1 and 255 characters"), body("is_active").optional().isBoolean(), body("max_hosts_per_day").optional().isInt({ min: 1, max: 1000 }), body("allowed_ip_ranges").optional().isArray(), + body("default_host_group_id") + .optional({ nullable: true, checkFalsy: true }) + .isString(), body("expires_at").optional().isISO8601(), body("scopes") .optional() @@ -373,6 +380,9 @@ router.patch( const update_data = { updated_at: new Date() }; + // Allow updating token name + if (req.body.token_name !== undefined) + update_data.token_name = req.body.token_name; if (req.body.is_active !== undefined) update_data.is_active = req.body.is_active; if (req.body.max_hosts_per_day !== undefined) @@ -382,6 +392,25 @@ router.patch( if (req.body.expires_at !== undefined) update_data.expires_at = new Date(req.body.expires_at); + // Handle default host group update + if (req.body.default_host_group_id !== undefined) { + if (req.body.default_host_group_id) { + // Validate host group exists + const host_group = await prisma.host_groups.findUnique({ + where: { id: req.body.default_host_group_id }, + }); + + if (!host_group) { + return res.status(400).json({ error: "Host group not found" 
}); + } + + update_data.default_host_group_id = req.body.default_host_group_id; + } else { + // Allow clearing the default host group + update_data.default_host_group_id = null; + } + } + // Handle scopes updates for API tokens only if (req.body.scopes !== undefined) { if (existing_token.metadata?.integration_type === "api") { @@ -421,9 +450,16 @@ router.patch( where: { id: tokenId }, data: update_data, include: { - host_groups: true, + host_groups: { + select: { + id: true, + name: true, + color: true, + }, + }, users: { select: { + id: true, username: true, first_name: true, last_name: true, diff --git a/frontend/src/pages/settings/Integrations.jsx b/frontend/src/pages/settings/Integrations.jsx index c611f0f..a92cfc6 100644 --- a/frontend/src/pages/settings/Integrations.jsx +++ b/frontend/src/pages/settings/Integrations.jsx @@ -13,7 +13,7 @@ import { } from "lucide-react"; import { useEffect, useId, useState } from "react"; import SettingsLayout from "../../components/SettingsLayout"; -import api from "../../utils/api"; +import api, { formatDate } from "../../utils/api"; const Integrations = () => { // Generate unique IDs for form elements @@ -235,11 +235,20 @@ const Integrations = () => { try { const data = { + token_name: form_data.token_name, + max_hosts_per_day: form_data.max_hosts_per_day, allowed_ip_ranges: form_data.allowed_ip_ranges ? 
form_data.allowed_ip_ranges.split(",").map((ip) => ip.trim()) : [], }; + // Add default host group if provided + if (form_data.default_host_group_id) { + data.default_host_group_id = form_data.default_host_group_id; + } else { + data.default_host_group_id = null; + } + // Add expiration if provided if (form_data.expires_at) { data.expires_at = form_data.expires_at; @@ -323,7 +332,7 @@ const Integrations = () => { } }; - const format_date = (date_string) => { + const formatDate = (date_string) => { if (!date_string) return "Never"; return new Date(date_string).toLocaleString(); }; @@ -579,15 +588,15 @@ const Integrations = () => { {token.allowed_ip_ranges.join(", ")}

)} -

Created: {format_date(token.created_at)}

+

Created: {formatDate(token.created_at)}

{token.last_used_at && (

- Last Used: {format_date(token.last_used_at)} + Last Used: {formatDate(token.last_used_at)}

)} {token.expires_at && (

- Expires: {format_date(token.expires_at)} + Expires: {formatDate(token.expires_at)} {new Date(token.expires_at) < new Date() && ( @@ -599,15 +608,13 @@ const Integrations = () => {

- {token.metadata?.integration_type === "api" && ( - - )} +
-

Created: {format_date(token.created_at)}

+

Created: {formatDate(token.created_at)}

{token.last_used_at && (

- Last Used: {format_date(token.last_used_at)} + Last Used: {formatDate(token.last_used_at)}

)} {token.expires_at && (

- Expires: {format_date(token.expires_at)} + Expires: {formatDate(token.expires_at)} {new Date(token.expires_at) < new Date() && ( @@ -1821,14 +1828,14 @@ const Integrations = () => { )} - {/* Edit API Credential Modal */} + {/* Edit Token Modal */} {show_edit_modal && edit_token && (

- Edit API Credential + Edit Token

-
+
+ + + + + {(edit_token?.metadata?.integration_type === "proxmox-lxc" || + edit_token?.metadata?.integration_type === "direct-host") && ( + + )} {edit_token?.metadata?.integration_type === "api" && (
From e269c12463526d328e23c740f808f65bf833a152 Mon Sep 17 00:00:00 2001 From: Muhammad Ibrahim Date: Sun, 4 Jan 2026 16:03:24 +0000 Subject: [PATCH 05/13] Fix #402 - relating to Accepting the WhatsNew messages --- backend/src/routes/authRoutes.js | 20 ++++++++------------ backend/src/routes/hostRoutes.js | 9 ++++++++- biome.json | 2 +- 3 files changed, 17 insertions(+), 14 deletions(-) diff --git a/backend/src/routes/authRoutes.js b/backend/src/routes/authRoutes.js index 9ca0a03..bdff526 100644 --- a/backend/src/routes/authRoutes.js +++ b/backend/src/routes/authRoutes.js @@ -847,12 +847,10 @@ router.post( // Get accepted release notes versions let acceptedVersions = []; try { - if (prisma.release_notes_acceptances) { - acceptedVersions = await prisma.release_notes_acceptances.findMany({ - where: { user_id: user.id }, - select: { version: true }, - }); - } + acceptedVersions = await prisma.release_notes_acceptances.findMany({ + where: { user_id: user.id }, + select: { version: true }, + }); } catch (error) { // If table doesn't exist yet or Prisma client not regenerated, use empty array console.warn( @@ -1010,12 +1008,10 @@ router.post( // Get accepted release notes versions let acceptedVersions = []; try { - if (prisma.release_notes_acceptances) { - acceptedVersions = await prisma.release_notes_acceptances.findMany({ - where: { user_id: user.id }, - select: { version: true }, - }); - } + acceptedVersions = await prisma.release_notes_acceptances.findMany({ + where: { user_id: user.id }, + select: { version: true }, + }); } catch (error) { // If table doesn't exist yet or Prisma client not regenerated, use empty array console.warn( diff --git a/backend/src/routes/hostRoutes.js b/backend/src/routes/hostRoutes.js index 1b5667a..c71bc42 100644 --- a/backend/src/routes/hostRoutes.js +++ b/backend/src/routes/hostRoutes.js @@ -625,7 +625,14 @@ router.post( if (req.body.diskDetails) updateData.disk_details = req.body.diskDetails; // Network Information - if 
(req.body.gatewayIp) updateData.gateway_ip = req.body.gatewayIp; + if (req.body.gatewayIp) { + updateData.gateway_ip = req.body.gatewayIp; + } else if (Object.hasOwn(req.body, "gatewayIp")) { + // Log warning if gateway field was sent but empty (isolated network) + console.warn( + `Host ${host.hostname} reported with no default gateway configured`, + ); + } if (req.body.dnsServers) updateData.dns_servers = req.body.dnsServers; if (req.body.networkInterfaces) updateData.network_interfaces = req.body.networkInterfaces; diff --git a/biome.json b/biome.json index f81ca03..24499dd 100644 --- a/biome.json +++ b/biome.json @@ -1,5 +1,5 @@ { - "$schema": "https://biomejs.dev/schemas/2.3.4/schema.json", + "$schema": "https://biomejs.dev/schemas/2.3.11/schema.json", "vcs": { "enabled": true, "clientKind": "git", From be5ab681d2988b5f56f81fb435c78f45f2dbd116 Mon Sep 17 00:00:00 2001 From: Muhammad Ibrahim Date: Sun, 4 Jan 2026 16:10:05 +0000 Subject: [PATCH 06/13] Biome version upgrade --- package-lock.json | 72 +++++++++++++++++++++++------------------------ package.json | 2 +- 2 files changed, 37 insertions(+), 37 deletions(-) diff --git a/package-lock.json b/package-lock.json index 5661d3e..54b61f0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,7 +13,7 @@ "frontend" ], "devDependencies": { - "@biomejs/biome": "^2.3.4", + "@biomejs/biome": "^2.3.11", "concurrently": "^8.2.2", "lefthook": "^1.13.4" }, @@ -363,9 +363,9 @@ } }, "node_modules/@biomejs/biome": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.3.4.tgz", - "integrity": "sha512-TU08LXjBHdy0mEY9APtEtZdNQQijXUDSXR7IK1i45wgoPD5R0muK7s61QcFir6FpOj/RP1+YkPx5QJlycXUU3w==", + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.3.11.tgz", + "integrity": "sha512-/zt+6qazBWguPG6+eWmiELqO+9jRsMZ/DBU3lfuU2ngtIQYzymocHhKiZRyrbra4aCOoyTg/BmY+6WH5mv9xmQ==", "dev": true, "license": "MIT OR Apache-2.0", "bin": { @@ -379,20 +379,20 @@ "url": 
"https://opencollective.com/biome" }, "optionalDependencies": { - "@biomejs/cli-darwin-arm64": "2.3.4", - "@biomejs/cli-darwin-x64": "2.3.4", - "@biomejs/cli-linux-arm64": "2.3.4", - "@biomejs/cli-linux-arm64-musl": "2.3.4", - "@biomejs/cli-linux-x64": "2.3.4", - "@biomejs/cli-linux-x64-musl": "2.3.4", - "@biomejs/cli-win32-arm64": "2.3.4", - "@biomejs/cli-win32-x64": "2.3.4" + "@biomejs/cli-darwin-arm64": "2.3.11", + "@biomejs/cli-darwin-x64": "2.3.11", + "@biomejs/cli-linux-arm64": "2.3.11", + "@biomejs/cli-linux-arm64-musl": "2.3.11", + "@biomejs/cli-linux-x64": "2.3.11", + "@biomejs/cli-linux-x64-musl": "2.3.11", + "@biomejs/cli-win32-arm64": "2.3.11", + "@biomejs/cli-win32-x64": "2.3.11" } }, "node_modules/@biomejs/cli-darwin-arm64": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.4.tgz", - "integrity": "sha512-w40GvlNzLaqmuWYiDU6Ys9FNhJiclngKqcGld3iJIiy2bpJ0Q+8n3haiaC81uTPY/NA0d8Q/I3Z9+ajc14102Q==", + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.11.tgz", + "integrity": "sha512-/uXXkBcPKVQY7rc9Ys2CrlirBJYbpESEDme7RKiBD6MmqR2w3j0+ZZXRIL2xiaNPsIMMNhP1YnA+jRRxoOAFrA==", "cpu": [ "arm64" ], @@ -407,9 +407,9 @@ } }, "node_modules/@biomejs/cli-darwin-x64": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.4.tgz", - "integrity": "sha512-3s7TLVtjJ7ni1xADXsS7x7GMUrLBZXg8SemXc3T0XLslzvqKj/dq1xGeBQ+pOWQzng9MaozfacIHdK2UlJ3jGA==", + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.11.tgz", + "integrity": "sha512-fh7nnvbweDPm2xEmFjfmq7zSUiox88plgdHF9OIW4i99WnXrAC3o2P3ag9judoUMv8FCSUnlwJCM1B64nO5Fbg==", "cpu": [ "x64" ], @@ -424,9 +424,9 @@ } }, "node_modules/@biomejs/cli-linux-arm64": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.4.tgz", - "integrity": 
"sha512-y7efHyyM2gYmHy/AdWEip+VgTMe9973aP7XYKPzu/j8JxnPHuSUXftzmPhkVw0lfm4ECGbdBdGD6+rLmTgNZaA==", + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.11.tgz", + "integrity": "sha512-l4xkGa9E7Uc0/05qU2lMYfN1H+fzzkHgaJoy98wO+b/7Gl78srbCRRgwYSW+BTLixTBrM6Ede5NSBwt7rd/i6g==", "cpu": [ "arm64" ], @@ -441,9 +441,9 @@ } }, "node_modules/@biomejs/cli-linux-arm64-musl": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.4.tgz", - "integrity": "sha512-IruVGQRwMURivWazchiq7gKAqZSFs5so6gi0hJyxk7x6HR+iwZbO2IxNOqyLURBvL06qkIHs7Wffl6Bw30vCbQ==", + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.11.tgz", + "integrity": "sha512-XPSQ+XIPZMLaZ6zveQdwNjbX+QdROEd1zPgMwD47zvHV+tCGB88VH+aynyGxAHdzL+Tm/+DtKST5SECs4iwCLg==", "cpu": [ "arm64" ], @@ -458,9 +458,9 @@ } }, "node_modules/@biomejs/cli-linux-x64": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.4.tgz", - "integrity": "sha512-gKfjWR/6/dfIxPJCw8REdEowiXCkIpl9jycpNVHux8aX2yhWPLjydOshkDL6Y/82PcQJHn95VCj7J+BRcE5o1Q==", + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.11.tgz", + "integrity": "sha512-/1s9V/H3cSe0r0Mv/Z8JryF5x9ywRxywomqZVLHAoa/uN0eY7F8gEngWKNS5vbbN/BsfpCG5yeBT5ENh50Frxg==", "cpu": [ "x64" ], @@ -475,9 +475,9 @@ } }, "node_modules/@biomejs/cli-linux-x64-musl": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.4.tgz", - "integrity": "sha512-mzKFFv/w66e4/jCobFmD3kymCqG+FuWE7sVa4Yjqd9v7qt2UhXo67MSZKY9Ih18V2IwPzRKQPCw6KwdZs6AXSA==", + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.11.tgz", + "integrity": 
"sha512-vU7a8wLs5C9yJ4CB8a44r12aXYb8yYgBn+WeyzbMjaCMklzCv1oXr8x+VEyWodgJt9bDmhiaW/I0RHbn7rsNmw==", "cpu": [ "x64" ], @@ -492,9 +492,9 @@ } }, "node_modules/@biomejs/cli-win32-arm64": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.4.tgz", - "integrity": "sha512-5TJ6JfVez+yyupJ/iGUici2wzKf0RrSAxJhghQXtAEsc67OIpdwSKAQboemILrwKfHDi5s6mu7mX+VTCTUydkw==", + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.11.tgz", + "integrity": "sha512-PZQ6ElCOnkYapSsysiTy0+fYX+agXPlWugh6+eQ6uPKI3vKAqNp6TnMhoM3oY2NltSB89hz59o8xIfOdyhi9Iw==", "cpu": [ "arm64" ], @@ -509,9 +509,9 @@ } }, "node_modules/@biomejs/cli-win32-x64": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.4.tgz", - "integrity": "sha512-FGCijXecmC4IedQ0esdYNlMpx0Jxgf4zceCaMu6fkjWyjgn50ZQtMiqZZQ0Q/77yqPxvtkgZAvt5uGw0gAAjig==", + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.11.tgz", + "integrity": "sha512-43VrG813EW+b5+YbDbz31uUsheX+qFKCpXeY9kfdAx+ww3naKxeVkTD9zLIWxUPfJquANMHrmW3wbe/037G0Qg==", "cpu": [ "x64" ], diff --git a/package.json b/package.json index 58dac5c..2194d52 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,7 @@ "lint:fix": "biome check --write ." 
}, "devDependencies": { - "@biomejs/biome": "^2.3.4", + "@biomejs/biome": "^2.3.11", "concurrently": "^8.2.2", "lefthook": "^1.13.4" }, From dd09b82299094187b4fabff23c497c0ddbba26b6 Mon Sep 17 00:00:00 2001 From: Muhammad Ibrahim Date: Sun, 4 Jan 2026 16:17:01 +0000 Subject: [PATCH 07/13] Added scroll bar and fixed empty gateway issue --- backend/src/routes/hostRoutes.js | 2 +- frontend/src/pages/HostDetail.jsx | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/src/routes/hostRoutes.js b/backend/src/routes/hostRoutes.js index c71bc42..ffacfb1 100644 --- a/backend/src/routes/hostRoutes.js +++ b/backend/src/routes/hostRoutes.js @@ -532,7 +532,7 @@ router.post( .withMessage("Disk details must be an array"), // Network Information body("gatewayIp") - .optional() + .optional({ checkFalsy: true }) .isIP() .withMessage("Gateway IP must be a valid IP address"), body("dnsServers") diff --git a/frontend/src/pages/HostDetail.jsx b/frontend/src/pages/HostDetail.jsx index 48898a9..206c9d8 100644 --- a/frontend/src/pages/HostDetail.jsx +++ b/frontend/src/pages/HostDetail.jsx @@ -1187,7 +1187,7 @@ const HostDetail = () => { Disk Usage -
+
{host.disk_details.map((disk, index) => (
{ Disk Usage -
+
{host.disk_details.map((disk, index) => (
Date: Sat, 3 Jan 2026 18:01:30 +0000 Subject: [PATCH 08/13] Fix #393: Add explicit network configuration for Docker Swarm deployments Services can only discover each other by hostname within the same Docker network. This fix adds an explicit `patchmon-internal` bridge network that all services (database, redis, backend, frontend) connect to, enabling reliable service discovery in both Docker Compose and Docker Swarm environments. Changes: - Added `patchmon-internal` network definition to docker-compose.yml and docker-compose.dev.yml - Connected all services to the internal network for service-to-service communication - Added comprehensive Docker Swarm documentation to docker/README.md with network configuration guidance and troubleshooting for the "host not found in upstream" error This resolves the nginx error users experienced when deploying to Docker Swarm with the frontend on a separate network from the backend. --- docker/README.md | 73 +++++++++++++++++++++++++++++++++++ docker/docker-compose.dev.yml | 12 ++++++ docker/docker-compose.yml | 12 ++++++ 3 files changed, 97 insertions(+) diff --git a/docker/README.md b/docker/README.md index 32409c4..7ee6fdb 100644 --- a/docker/README.md +++ b/docker/README.md @@ -225,6 +225,79 @@ If you wish to bind either if their respective container paths to a host path ra --- +## Docker Swarm Deployment + +> [!NOTE] +> This section covers deploying PatchMon to a Docker Swarm cluster. For standard Docker Compose deployments on a single host, use the production deployment guide above. + +### Network Configuration + +When deploying to Docker Swarm with a reverse proxy (e.g., Traefik), proper network configuration is critical. 
The default `docker-compose.yml` uses an internal bridge network that enables service-to-service communication: + +```yaml +networks: + patchmon-internal: + driver: bridge +``` + +All services (database, redis, backend, and frontend) connect to this internal network, allowing them to discover each other by service name. + +**Important**: If you're using an external reverse proxy network (like `traefik-net`), ensure that: + +1. All PatchMon services remain on the `patchmon-internal` network for internal communication +2. The frontend service (NGINX) can be configured to also bind to the reverse proxy network if needed +3. Service names resolve correctly within the same network + +### Service Discovery in Swarm + +In Docker Swarm, service discovery works through: +- **Service Name Resolution**: Service names resolve to virtual IPs within the same network +- **Load Balancing**: Requests to a service name are automatically load-balanced across all replicas +- **Network Isolation**: Services on different networks cannot communicate directly + +### Configuration for Swarm with Traefik + +If you're using Traefik as a reverse proxy: + +1. Keep the default `patchmon-internal` network for backend services +2. Configure Traefik in your Swarm deployment with its own network +3. Ensure the frontend service can reach the backend through the internal network + +Example modification for Swarm: + +```yaml +services: + frontend: + image: ghcr.io/patchmon/patchmon-frontend:latest + networks: + - patchmon-internal + deploy: + replicas: 1 + labels: + - "traefik.enable=true" + - "traefik.http.routers.patchmon.rule=Host(`patchmon.my.domain`)" + # ... other Traefik labels +``` + +The frontend reaches the backend via the `patchmon-internal` network using the hostname `backend`, while Traefik routes external traffic to the frontend service. + +### Troubleshooting Network Issues + +**Error: `host not found in upstream "backend"`** + +This typically occurs when: +1. 
Confirm network connectivity: `docker exec <frontend-container> ping backend`
HERE volumes: - postgres_data:/var/lib/postgresql/data + networks: + - patchmon-internal healthcheck: test: ["CMD-SHELL", "pg_isready -U patchmon_user -d patchmon_db"] interval: 3s @@ -38,6 +40,8 @@ services: command: redis-server --requirepass your-redis-password-here # CHANGE THIS TO YOUR REDIS PASSWORD volumes: - redis_data:/data + networks: + - patchmon-internal healthcheck: test: ["CMD", "redis-cli", "--no-auth-warning", "-a", "your-redis-password-here", "ping"] # CHANGE THIS TO YOUR REDIS PASSWORD interval: 3s @@ -79,6 +83,8 @@ services: volumes: - agent_files:/app/agents - branding_assets:/app/assets + networks: + - patchmon-internal depends_on: database: condition: service_healthy @@ -92,6 +98,8 @@ services: - "3000:3000" volumes: - branding_assets:/usr/share/nginx/html/assets + networks: + - patchmon-internal depends_on: backend: condition: service_healthy @@ -101,3 +109,7 @@ volumes: redis_data: agent_files: branding_assets: + +networks: + patchmon-internal: + driver: bridge From abd08bf4e1bc5ed22fc85e158b64fd811ca5341b Mon Sep 17 00:00:00 2001 From: Muhammad Ibrahim Date: Sun, 4 Jan 2026 16:54:03 +0000 Subject: [PATCH 09/13] Added function to determine the nginx version for right http2 syntax --- setup.sh | 71 ++++++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 69 insertions(+), 2 deletions(-) diff --git a/setup.sh b/setup.sh index ed489a8..4a76402 100755 --- a/setup.sh +++ b/setup.sh @@ -1348,6 +1348,62 @@ EOF print_status "Systemd service created: $SERVICE_NAME (running as $INSTANCE_USER)" } +# Get nginx HTTP/2 syntax based on version +get_nginx_http2_syntax() { + local listen_line="" + local http2_line="" + + # Get nginx version (e.g., "nginx version: nginx/1.18.0" or "nginx/1.24.0") + local nginx_version="" + + if command -v nginx >/dev/null 2>&1; then + local nginx_version_output=$(nginx -v 2>&1) + # Try Perl regex first (more reliable) + if echo "$nginx_version_output" | grep -oP 'nginx/\K[0-9]+\.[0-9]+' >/dev/null 2>&1; then + 
nginx_version=$(echo "$nginx_version_output" | grep -oP 'nginx/\K[0-9]+\.[0-9]+' | head -1) + else + # Fallback: use sed for systems without Perl regex support + nginx_version=$(echo "$nginx_version_output" | sed -n 's/.*nginx\/\([0-9]\+\.[0-9]\+\).*/\1/p' | head -1) + fi + + # If still empty, try nginx -V (verbose output) + if [ -z "$nginx_version" ]; then + local nginx_verbose_output=$(nginx -V 2>&1) + if echo "$nginx_verbose_output" | grep -oP 'nginx/\K[0-9]+\.[0-9]+' >/dev/null 2>&1; then + nginx_version=$(echo "$nginx_verbose_output" | grep -oP 'nginx/\K[0-9]+\.[0-9]+' | head -1) + else + nginx_version=$(echo "$nginx_verbose_output" | sed -n 's/.*nginx\/\([0-9]\+\.[0-9]\+\).*/\1/p' | head -1) + fi + fi + fi + + if [ -z "$nginx_version" ]; then + # Fallback: assume newer version syntax (1.24.x+) + print_warning "Could not detect nginx version, using newer syntax (1.24.x+)" + listen_line=" listen 443 ssl;" + http2_line=" http2 on;" + else + # Extract major and minor version + local major_version=$(echo "$nginx_version" | cut -d. -f1) + local minor_version=$(echo "$nginx_version" | cut -d. 
# nginx 1.25.1 and later support: listen 443 ssl; plus a separate http2 on; directive
+    # NOTE(review): "http2 on;" is an unknown directive before 1.25.1, so the 1.18 cutoff below likely breaks nginx 1.19-1.25.0 - verify the version boundary
a/backend/src/routes/apiHostsRoutes.js +++ b/backend/src/routes/apiHostsRoutes.js @@ -160,7 +160,12 @@ router.get( } // Calculate statistics for this specific host - const [totalInstalledPackages, outdatedPackagesCount, securityUpdatesCount, totalRepos] = await Promise.all([ + const [ + totalInstalledPackages, + outdatedPackagesCount, + securityUpdatesCount, + totalRepos, + ] = await Promise.all([ // Total packages installed on this host prisma.host_packages.count({ where: { @@ -295,7 +300,9 @@ router.get( }); } catch (error) { console.error("Error fetching host network info:", error); - res.status(500).json({ error: "Failed to fetch host network information" }); + res + .status(500) + .json({ error: "Failed to fetch host network information" }); } }, ); @@ -351,7 +358,9 @@ router.get( }); } catch (error) { console.error("Error fetching host system info:", error); - res.status(500).json({ error: "Failed to fetch host system information" }); + res + .status(500) + .json({ error: "Failed to fetch host system information" }); } }, ); @@ -465,17 +474,17 @@ router.get( try { // Try to get live queue stats from Bull/BullMQ if available const { queueManager } = require("../services/automation"); - if (queueManager && queueManager.getQueue) { - const agentQueue = queueManager.getQueue(host.api_id); - if (agentQueue) { - const counts = await agentQueue.getJobCounts(); - queueStats = { - waiting: counts.waiting || 0, - active: counts.active || 0, - delayed: counts.delayed || 0, - failed: counts.failed || 0, - }; - } + if (queueManager && queueManager.getHostJobs) { + const hostQueueData = await queueManager.getHostJobs( + host.api_id, + Number.parseInt(limit, 10), + ); + queueStats = { + waiting: hostQueueData.waiting || 0, + active: hostQueueData.active || 0, + delayed: hostQueueData.delayed || 0, + failed: hostQueueData.failed || 0, + }; } } catch (queueError) { console.warn("Could not fetch live queue stats:", queueError.message); @@ -577,7 +586,8 @@ router.get( 
containers_count: containers, volumes_count: volumes, networks_count: networks, - description: "Monitor Docker containers, images, volumes, and networks. Collects real-time container status events.", + description: + "Monitor Docker containers, images, volumes, and networks. Collects real-time container status events.", }; } @@ -586,7 +596,8 @@ router.get( integrations: { docker: dockerDetails || { enabled: false, - description: "Monitor Docker containers, images, volumes, and networks. Collects real-time container status events.", + description: + "Monitor Docker containers, images, volumes, and networks. Collects real-time container status events.", }, }, }); From 20711e755c2902a1a024377c5766c542d6fde2e8 Mon Sep 17 00:00:00 2001 From: Muhammad Ibrahim Date: Sun, 4 Jan 2026 17:13:50 +0000 Subject: [PATCH 11/13] fixed code quality --- .../automation/dockerImageUpdateCheck.js | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/backend/src/services/automation/dockerImageUpdateCheck.js b/backend/src/services/automation/dockerImageUpdateCheck.js index d0abb2a..88fe861 100644 --- a/backend/src/services/automation/dockerImageUpdateCheck.js +++ b/backend/src/services/automation/dockerImageUpdateCheck.js @@ -1,6 +1,5 @@ const { prisma } = require("./shared/prisma"); const https = require("node:https"); -const http = require("node:http"); const { v4: uuidv4 } = require("uuid"); /** @@ -58,7 +57,9 @@ class DockerImageUpdateCheck { const params = {}; const regex = /(\w+)="([^"]+)"/g; let match; - while ((match = regex.exec(header)) !== null) { + while (true) { + match = regex.exec(header); + if (match === null) break; params[match[1]] = match[2]; } @@ -105,7 +106,9 @@ class DockerImageUpdateCheck { const response = await this.httpsRequest(options); if (response.statusCode !== 200) { - throw new Error(`Token request failed with status ${response.statusCode}`); + throw new Error( + `Token request failed with status ${response.statusCode}`, + 
); } const tokenData = JSON.parse(response.body); @@ -188,14 +191,15 @@ class DockerImageUpdateCheck { // Get digest from Docker-Content-Digest header const digest = response.headers["docker-content-digest"]; if (!digest) { - throw new Error(`No Docker-Content-Digest header for ${imageName}:${tag}`); + throw new Error( + `No Docker-Content-Digest header for ${imageName}:${tag}`, + ); } // Clean up digest (remove sha256: prefix if present) return digest.startsWith("sha256:") ? digest.substring(7) : digest; } - /** * Parse image name to extract registry and repository */ @@ -214,7 +218,11 @@ class DockerImageUpdateCheck { const firstPart = parts[0]; // Check if first part looks like a registry (contains . or : or is localhost) - if (firstPart.includes(".") || firstPart.includes(":") || firstPart === "localhost") { + if ( + firstPart.includes(".") || + firstPart.includes(":") || + firstPart === "localhost" + ) { registry = firstPart; repository = parts.slice(1).join("/"); } From 68fe101317099de228518ee521d4dccb0cf39390 Mon Sep 17 00:00:00 2001 From: Muhammad Ibrahim Date: Sun, 4 Jan 2026 17:35:51 +0000 Subject: [PATCH 12/13] Added case insensitivity to TFA routes --- backend/src/routes/authRoutes.js | 64 ++++++++++++++++++++++++-------- 1 file changed, 49 insertions(+), 15 deletions(-) diff --git a/backend/src/routes/authRoutes.js b/backend/src/routes/authRoutes.js index bdff526..3c46ffc 100644 --- a/backend/src/routes/authRoutes.js +++ b/backend/src/routes/authRoutes.js @@ -138,7 +138,10 @@ router.post( // Check if username or email already exists const existingUser = await prisma.users.findFirst({ where: { - OR: [{ username: username.trim() }, { email: email.trim() }], + OR: [ + { username: { equals: username.trim(), mode: "insensitive" } }, + { email: email.trim().toLowerCase() }, + ], }, }); @@ -156,7 +159,7 @@ router.post( data: { id: uuidv4(), username: username.trim(), - email: email.trim(), + email: email.trim().toLowerCase(), password_hash: passwordHash, 
first_name: firstName.trim(), last_name: lastName.trim(), @@ -308,7 +311,10 @@ router.post( // Check if user already exists const existingUser = await prisma.users.findFirst({ where: { - OR: [{ username }, { email }], + OR: [ + { username: { equals: username, mode: "insensitive" } }, + { email: email.trim().toLowerCase() }, + ], }, }); @@ -326,7 +332,7 @@ router.post( data: { id: uuidv4(), username, - email, + email: email.trim().toLowerCase(), password_hash: passwordHash, first_name: first_name || null, last_name: last_name || null, @@ -408,8 +414,9 @@ router.put( req.body; const updateData = {}; - if (username) updateData.username = username; - if (email) updateData.email = email; + // Handle all fields consistently - trim and update if provided + if (username) updateData.username = username.trim(); + if (email) updateData.email = email.trim().toLowerCase(); if (first_name !== undefined) updateData.first_name = first_name || null; if (last_name !== undefined) updateData.last_name = last_name || null; if (role) updateData.role = role; @@ -432,8 +439,17 @@ router.put( { id: { not: userId } }, { OR: [ - ...(username ? [{ username }] : []), - ...(email ? [{ email }] : []), + ...(username + ? [ + { + username: { + equals: username.trim(), + mode: "insensitive", + }, + }, + ] + : []), + ...(email ? 
[{ email: email.trim().toLowerCase() }] : []), ], }, ], @@ -668,7 +684,10 @@ router.post( // Check if user already exists const existingUser = await prisma.users.findFirst({ where: { - OR: [{ username }, { email }], + OR: [ + { username: { equals: username, mode: "insensitive" } }, + { email: email.trim().toLowerCase() }, + ], }, }); @@ -690,7 +709,7 @@ router.post( data: { id: uuidv4(), username, - email, + email: email.trim().toLowerCase(), password_hash: passwordHash, first_name: firstName.trim(), last_name: lastName.trim(), @@ -755,7 +774,10 @@ router.post( // Find user by username or email const user = await prisma.users.findFirst({ where: { - OR: [{ username }, { email: username }], + OR: [ + { username: { equals: username, mode: "insensitive" } }, + { email: username.toLowerCase() }, + ], is_active: true, }, select: { @@ -919,7 +941,10 @@ router.post( // Find user const user = await prisma.users.findFirst({ where: { - OR: [{ username }, { email: username }], + OR: [ + { username: { equals: username, mode: "insensitive" } }, + { email: username.toLowerCase() }, + ], is_active: true, tfa_enabled: true, }, @@ -1100,7 +1125,7 @@ router.put( // Handle all fields consistently - trim and update if provided if (username) updateData.username = username.trim(); - if (email) updateData.email = email.trim(); + if (email) updateData.email = email.trim().toLowerCase(); if (first_name !== undefined) { // Allow null or empty string to clear the field, otherwise trim updateData.first_name = @@ -1124,8 +1149,17 @@ router.put( { id: { not: req.user.id } }, { OR: [ - ...(username ? [{ username }] : []), - ...(email ? [{ email }] : []), + ...(username + ? [ + { + username: { + equals: username.trim(), + mode: "insensitive", + }, + }, + ] + : []), + ...(email ? 
[{ email: email.trim().toLowerCase() }] : []), ], }, ], From 06c10c94f70eb5c37c28a1eca99bfb8c0747ab0a Mon Sep 17 00:00:00 2001 From: Muhammad Ibrahim Date: Sun, 4 Jan 2026 17:49:41 +0000 Subject: [PATCH 13/13] disabled a workflow --- .github/workflows/{app_build.yml => app_build.disabled} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{app_build.yml => app_build.disabled} (100%) diff --git a/.github/workflows/app_build.yml b/.github/workflows/app_build.disabled similarity index 100% rename from .github/workflows/app_build.yml rename to .github/workflows/app_build.disabled