diff --git a/.github/workflows/app_build.yml b/.github/workflows/app_build.yml index 70fc285..182a201 100644 --- a/.github/workflows/app_build.yml +++ b/.github/workflows/app_build.yml @@ -1,11 +1,9 @@ name: Build on Merge - on: push: branches: - main - dev - jobs: deploy: runs-on: self-hosted @@ -15,3 +13,11 @@ jobs: - name: Run rebuild script run: /root/patchmon/platform/scripts/app_build.sh ${{ github.ref_name }} + + rebuild-pmon: + runs-on: self-hosted + needs: deploy + if: github.ref_name == 'dev' + steps: + - name: Rebuild pmon + run: /root/patchmon/platform/scripts/manage_pmon_auto.sh diff --git a/README.md b/README.md index a7f3b1c..5b69cf3 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,7 @@ [![GitHub](https://img.shields.io/badge/GitHub-Repository-black?style=for-the-badge&logo=github)](https://github.com/9technologygroup/patchmon.net) [![Roadmap](https://img.shields.io/badge/Roadmap-View%20Progress-green?style=for-the-badge&logo=github)](https://github.com/users/9technologygroup/projects/1) [![Documentation](https://img.shields.io/badge/Documentation-docs.patchmon.net-blue?style=for-the-badge&logo=book)](https://docs.patchmon.net/) + --- ## Please STAR this repo :D @@ -13,7 +14,7 @@ PatchMon provides centralized patch management across diverse server environments. Agents communicate outbound-only to the PatchMon server, eliminating inbound ports on monitored hosts while delivering comprehensive visibility and safe automation. -![Dashboard Screenshot](https://raw.githubusercontent.com/9technologygroup/patchmon.net/main/dashboard.jpeg) +![Dashboard Screenshot](https://raw.githubusercontent.com/PatchMon/PatchMon/main/dashboard.jpeg) ## Features @@ -42,6 +43,7 @@ PatchMon provides centralized patch management across diverse server environment ### API & Integrations - REST API under `/api/v1` with JWT auth +- **Proxmox LXC Auto-Enrollment** - Automatically discover and enroll LXC containers from Proxmox hosts ([Documentation](PROXMOX_AUTO_ENROLLMENT.md)) ### Security - Rate limiting for general, auth, and agent endpoints @@ -63,7 +65,7 @@ Managed, zero-maintenance PatchMon hosting. Stay tuned. #### Docker (preferred) -For getting started with Docker, see the [Docker documentation](https://github.com/9technologygroup/patchmon.net/blob/main/docker/README.md) +For getting started with Docker, see the [Docker documentation](https://github.com/PatchMon/PatchMon/blob/main/docker/README.md) #### Native Install (advanced/non-docker) @@ -85,7 +87,7 @@ apt install curl -y #### Script ```bash -curl -fsSL -o setup.sh https://raw.githubusercontent.com/9technologygroup/patchmon.net/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh +curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh ``` #### Minimum specs for building : ##### @@ -145,7 +147,7 @@ Operational ## Roadmap -- Roadmap board: https://github.com/users/9technologygroup/projects/1 +- Roadmap board: https://github.com/orgs/PatchMon/projects/2 ## License @@ -278,6 +280,6 @@ Thank you to all our contributors who help make PatchMon better every day! 
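As an aside on the Proxmox LXC auto-enrollment feature added to the README above: enrollment is driven by `agents/proxmox_auto_enroll.sh`, which the backend serves with credentials injected (see `backend/src/routes/autoEnrollmentRoutes.js` later in this diff). A minimal invocation sketch, assuming the router is mounted at `/api/v1/auto-enrollment` (the same base path the script posts to) and using placeholder token values:

```bash
# Placeholder values - substitute your server URL and a token created in PatchMon
PATCHMON_URL="https://patchmon.example.com"
TOKEN_KEY="patchmon_ae_xxxxxxxxxxxxxxxx"
TOKEN_SECRET="<secret shown once at token creation>"

# Fetch the pre-configured script from the server and run it on the Proxmox host
curl -fsSL "${PATCHMON_URL}/api/v1/auto-enrollment/proxmox-lxc?token_key=${TOKEN_KEY}&token_secret=${TOKEN_SECRET}" \
  -o proxmox_auto_enroll.sh
bash proxmox_auto_enroll.sh

# The script also reads environment toggles, e.g. a discovery-only pass:
# DRY_RUN=true bash proxmox_auto_enroll.sh
```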
**Made with โค๏ธ by the PatchMon Team** [![Discord](https://img.shields.io/badge/Discord-Join%20Server-blue?style=for-the-badge&logo=discord)](https://patchmon.net/discord) -[![GitHub](https://img.shields.io/badge/GitHub-Repository-black?style=for-the-badge&logo=github)](https://github.com/9technologygroup/patchmon.net) +[![GitHub](https://img.shields.io/badge/GitHub-Repository-black?style=for-the-badge&logo=github)](https://github.com/PatchMon/PatchMon) diff --git a/agents/patchmon-agent.sh b/agents/patchmon-agent.sh index 843e21d..679c55d 100755 --- a/agents/patchmon-agent.sh +++ b/agents/patchmon-agent.sh @@ -56,6 +56,28 @@ warning() { log "WARNING: $1" } +# Get or generate machine ID +get_machine_id() { + # Try standard locations for machine-id + if [[ -f /etc/machine-id ]]; then + cat /etc/machine-id + elif [[ -f /var/lib/dbus/machine-id ]]; then + cat /var/lib/dbus/machine-id + else + # Fallback: generate from hardware UUID or hostname+MAC + if command -v dmidecode &> /dev/null; then + local uuid=$(dmidecode -s system-uuid 2>/dev/null | tr -d ' -' | tr '[:upper:]' '[:lower:]') + if [[ -n "$uuid" && "$uuid" != "notpresent" ]]; then + echo "$uuid" + return + fi + fi + # Last resort: hash hostname + primary MAC address + local primary_mac=$(ip link show | grep -oP '(?<=link/ether\s)[0-9a-f:]+' | head -1 | tr -d ':') + echo "$HOSTNAME-$primary_mac" | sha256sum | cut -d' ' -f1 | cut -c1-32 + fi +} + # Check if running as root check_root() { if [[ $EUID -ne 0 ]]; then @@ -865,6 +887,9 @@ send_update() { # Merge all JSON objects into one local merged_json=$(echo "$hardware_json $network_json $system_json" | jq -s '.[0] * .[1] * .[2]') + # Get machine ID + local machine_id=$(get_machine_id) + # Create the base payload and merge with system info local base_payload=$(cat </dev/null || cat /proc/sys/kernel/random/uuid)" + fi +} + # Parse arguments from environment (passed via HTTP headers) if [[ -z "$PATCHMON_URL" ]] || [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then error "Missing required parameters. This script should be called via the PatchMon web interface." fi +# Check if --force flag is set (for bypassing broken packages) +FORCE_INSTALL="${FORCE_INSTALL:-false}" +if [[ "$*" == *"--force"* ]] || [[ "$FORCE_INSTALL" == "true" ]]; then + FORCE_INSTALL="true" + warning "โš ๏ธ Force mode enabled - will bypass broken packages" +fi + +# Get unique machine ID for this host +MACHINE_ID=$(get_machine_id) +export MACHINE_ID + info "๐Ÿš€ Starting PatchMon Agent Installation..." info "๐Ÿ“‹ Server: $PATCHMON_URL" info "๐Ÿ”‘ API ID: ${API_ID:0:16}..." +info "๐Ÿ†” Machine ID: ${MACHINE_ID:0:16}..." # Display diagnostic information echo "" @@ -131,16 +156,88 @@ echo "" info "๐Ÿ“ฆ Installing required dependencies..." echo "" +# Function to check if a command exists +command_exists() { + command -v "$1" >/dev/null 2>&1 +} + +# Function to install packages with error handling +install_apt_packages() { + local packages=("$@") + local missing_packages=() + + # Check which packages are missing + for pkg in "${packages[@]}"; do + if ! command_exists "$pkg"; then + missing_packages+=("$pkg") + fi + done + + if [ ${#missing_packages[@]} -eq 0 ]; then + success "All required packages are already installed" + return 0 + fi + + info "Need to install: ${missing_packages[*]}" + + # Build apt-get command based on force mode + local apt_cmd="apt-get install ${missing_packages[*]} -y" + + if [[ "$FORCE_INSTALL" == "true" ]]; then + info "Using force mode - bypassing broken packages..." 
+ apt_cmd="$apt_cmd -o APT::Get::Fix-Broken=false -o DPkg::Options::=\"--force-confold\" -o DPkg::Options::=\"--force-confdef\"" + fi + + # Try to install packages + if eval "$apt_cmd" 2>&1 | tee /tmp/patchmon_apt_install.log; then + success "Packages installed successfully" + return 0 + else + warning "Package installation encountered issues, checking if required tools are available..." + + # Verify critical dependencies are actually available + local all_ok=true + for pkg in "${packages[@]}"; do + if ! command_exists "$pkg"; then + if [[ "$FORCE_INSTALL" == "true" ]]; then + error "Critical dependency '$pkg' is not available even with --force. Please install manually." + else + error "Critical dependency '$pkg' is not available. Try again with --force flag or install manually: apt-get install $pkg" + fi + all_ok=false + fi + done + + if $all_ok; then + success "All required tools are available despite installation warnings" + return 0 + else + return 1 + fi + fi +} + # Detect package manager and install jq and curl if command -v apt-get >/dev/null 2>&1; then # Debian/Ubuntu info "Detected apt-get (Debian/Ubuntu)" echo "" + + # Check for broken packages + if dpkg -l | grep -q "^iH\|^iF" 2>/dev/null; then + if [[ "$FORCE_INSTALL" == "true" ]]; then + warning "Detected broken packages on system - force mode will work around them" + else + warning "โš ๏ธ Broken packages detected on system" + warning "If installation fails, retry with: curl -s {URL}/api/v1/hosts/install --force -H ..." + fi + fi + info "Updating package lists..." - apt-get update + apt-get update || true echo "" info "Installing jq, curl, and bc..." - apt-get install jq curl bc -y + install_apt_packages jq curl bc elif command -v yum >/dev/null 2>&1; then # CentOS/RHEL 7 info "Detected yum (CentOS/RHEL 7)" @@ -261,6 +358,33 @@ if [[ -f "/var/log/patchmon-agent.log" ]]; then fi # Step 4: Test the configuration +# Check if this machine is already enrolled +info "๐Ÿ” Checking if machine is already enrolled..." +existing_check=$(curl $CURL_FLAGS -s -X POST \ + -H "X-API-ID: $API_ID" \ + -H "X-API-KEY: $API_KEY" \ + -H "Content-Type: application/json" \ + -d "{\"machine_id\": \"$MACHINE_ID\"}" \ + "$PATCHMON_URL/api/v1/hosts/check-machine-id" \ + -w "\n%{http_code}" 2>&1) + +http_code=$(echo "$existing_check" | tail -n 1) +response_body=$(echo "$existing_check" | sed '$d') + +if [[ "$http_code" == "200" ]]; then + already_enrolled=$(echo "$response_body" | jq -r '.exists' 2>/dev/null || echo "false") + if [[ "$already_enrolled" == "true" ]]; then + warning "โš ๏ธ This machine is already enrolled in PatchMon" + info "Machine ID: $MACHINE_ID" + info "Existing host: $(echo "$response_body" | jq -r '.host.friendly_name' 2>/dev/null)" + info "" + info "The agent will be reinstalled/updated with existing credentials." + echo "" + else + success "โœ… Machine not yet enrolled - proceeding with installation" + fi +fi + info "๐Ÿงช Testing API credentials and connectivity..." 
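The enrollment check above keys on the machine ID the agent derives in `get_machine_id()` earlier in this diff. To see which identifier a given host will report, the same fallback order can be reproduced by hand; a sketch, with the `dmidecode` system-uuid branch omitted for brevity:

```bash
# Same fallback order as the agent: systemd machine-id, then the D-Bus copy,
# then a 32-character hash of hostname + primary MAC address.
if [[ -f /etc/machine-id ]]; then
  cat /etc/machine-id
elif [[ -f /var/lib/dbus/machine-id ]]; then
  cat /var/lib/dbus/machine-id
else
  primary_mac=$(ip link show | grep -oP '(?<=link/ether\s)[0-9a-f:]+' | head -1 | tr -d ':')
  echo "$(hostname)-${primary_mac}" | sha256sum | cut -d' ' -f1 | cut -c1-32
fi
```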
if /usr/local/bin/patchmon-agent.sh test; then success "โœ… TEST: API credentials are valid and server is reachable" diff --git a/agents/proxmox_auto_enroll.sh b/agents/proxmox_auto_enroll.sh new file mode 100755 index 0000000..f999b90 --- /dev/null +++ b/agents/proxmox_auto_enroll.sh @@ -0,0 +1,437 @@ +#!/bin/bash +set -eo pipefail # Exit on error, pipe failures (removed -u as we handle unset vars explicitly) + +# Trap to catch errors only (not normal exits) +trap 'echo "[ERROR] Script failed at line $LINENO with exit code $?"' ERR + +SCRIPT_VERSION="2.0.0" +echo "[DEBUG] Script Version: $SCRIPT_VERSION ($(date +%Y-%m-%d\ %H:%M:%S))" + +# ============================================================================= +# PatchMon Proxmox LXC Auto-Enrollment Script +# ============================================================================= +# This script discovers LXC containers on a Proxmox host and automatically +# enrolls them into PatchMon for patch management. +# +# Usage: +# 1. Set environment variables or edit configuration below +# 2. Run: bash proxmox_auto_enroll.sh +# +# Requirements: +# - Must run on Proxmox host (requires 'pct' command) +# - Auto-enrollment token from PatchMon +# - Network access to PatchMon server +# ============================================================================= + +# ===== CONFIGURATION ===== +PATCHMON_URL="${PATCHMON_URL:-https://patchmon.example.com}" +AUTO_ENROLLMENT_KEY="${AUTO_ENROLLMENT_KEY:-}" +AUTO_ENROLLMENT_SECRET="${AUTO_ENROLLMENT_SECRET:-}" +CURL_FLAGS="${CURL_FLAGS:--s}" +DRY_RUN="${DRY_RUN:-false}" +HOST_PREFIX="${HOST_PREFIX:-}" +SKIP_STOPPED="${SKIP_STOPPED:-true}" +PARALLEL_INSTALL="${PARALLEL_INSTALL:-false}" +MAX_PARALLEL="${MAX_PARALLEL:-5}" +FORCE_INSTALL="${FORCE_INSTALL:-false}" + +# ===== COLOR OUTPUT ===== +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# ===== LOGGING FUNCTIONS ===== +info() { echo -e "${GREEN}[INFO]${NC} $1"; return 0; } +warn() { echo -e "${YELLOW}[WARN]${NC} $1"; return 0; } +error() { echo -e "${RED}[ERROR]${NC} $1"; exit 1; } +success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; return 0; } +debug() { [[ "${DEBUG:-false}" == "true" ]] && echo -e "${BLUE}[DEBUG]${NC} $1" || true; return 0; } + +# ===== BANNER ===== +cat << "EOF" +โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•— +โ•‘ โ•‘ +โ•‘ ____ _ _ __ __ โ•‘ +โ•‘ | _ \ __ _| |_ ___| |__ | \/ | ___ _ __ โ•‘ +โ•‘ | |_) / _` | __/ __| '_ \| |\/| |/ _ \| '_ \ โ•‘ +โ•‘ | __/ (_| | || (__| | | | | | | (_) | | | | โ•‘ +โ•‘ |_| \__,_|\__\___|_| |_|_| |_|\___/|_| |_| โ•‘ +โ•‘ โ•‘ +โ•‘ Proxmox LXC Auto-Enrollment Script โ•‘ +โ•‘ โ•‘ +โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +EOF +echo "" + +# ===== VALIDATION ===== +info "Validating configuration..." + +if [[ -z "$AUTO_ENROLLMENT_KEY" ]] || [[ -z "$AUTO_ENROLLMENT_SECRET" ]]; then + error "AUTO_ENROLLMENT_KEY and AUTO_ENROLLMENT_SECRET must be set" +fi + +if [[ -z "$PATCHMON_URL" ]]; then + error "PATCHMON_URL must be set" +fi + +# Check if running on Proxmox +if ! command -v pct &> /dev/null; then + error "This script must run on a Proxmox host (pct command not found)" +fi + +# Check for required commands +for cmd in curl jq; do + if ! 
command -v $cmd &> /dev/null; then + error "Required command '$cmd' not found. Please install it first." + fi +done + +info "Configuration validated successfully" +info "PatchMon Server: $PATCHMON_URL" +info "Dry Run Mode: $DRY_RUN" +info "Skip Stopped Containers: $SKIP_STOPPED" +echo "" + +# ===== DISCOVER LXC CONTAINERS ===== +info "Discovering LXC containers..." +lxc_list=$(pct list | tail -n +2) # Skip header + +if [[ -z "$lxc_list" ]]; then + warn "No LXC containers found on this Proxmox host" + exit 0 +fi + +# Count containers +total_containers=$(echo "$lxc_list" | wc -l) +info "Found $total_containers LXC container(s)" +echo "" + +info "Initializing statistics..." +# ===== STATISTICS ===== +enrolled_count=0 +skipped_count=0 +failed_count=0 + +# Track containers with dpkg errors for later recovery +declare -A dpkg_error_containers + +# Track all failed containers for summary +declare -A failed_containers +info "Statistics initialized" + +# ===== PROCESS CONTAINERS ===== +info "Starting container processing loop..." +while IFS= read -r line; do + info "[DEBUG] Read line from lxc_list" + vmid=$(echo "$line" | awk '{print $1}') + status=$(echo "$line" | awk '{print $2}') + name=$(echo "$line" | awk '{print $3}') + + info "Processing LXC $vmid: $name (status: $status)" + + # Skip stopped containers if configured + if [[ "$status" != "running" ]] && [[ "$SKIP_STOPPED" == "true" ]]; then + warn " Skipping $name - container not running" + ((skipped_count++)) || true + echo "" + continue + fi + + # Check if container is stopped + if [[ "$status" != "running" ]]; then + warn " Container $name is stopped - cannot gather info or install agent" + ((skipped_count++)) || true + echo "" + continue + fi + + # Get container details + debug " Gathering container information..." + hostname=$(timeout 5 pct exec "$vmid" -- hostname 2>/dev/null /dev/null /dev/null /dev/null || cat /var/lib/dbus/machine-id 2>/dev/null || echo 'proxmox-lxc-$vmid-'$(cat /proc/sys/kernel/random/uuid)" /dev/null || echo "proxmox-lxc-$vmid-unknown") + + friendly_name="${HOST_PREFIX}${hostname}" + + info " Hostname: $hostname" + info " IP Address: $ip_address" + info " OS: $os_info" + info " Machine ID: ${machine_id:0:16}..." + + if [[ "$DRY_RUN" == "true" ]]; then + info " [DRY RUN] Would enroll: $friendly_name" + ((enrolled_count++)) || true + echo "" + continue + fi + + # Call PatchMon auto-enrollment API + info " Enrolling $friendly_name in PatchMon..." + + response=$(curl $CURL_FLAGS -X POST \ + -H "X-Auto-Enrollment-Key: $AUTO_ENROLLMENT_KEY" \ + -H "X-Auto-Enrollment-Secret: $AUTO_ENROLLMENT_SECRET" \ + -H "Content-Type: application/json" \ + -d "{ + \"friendly_name\": \"$friendly_name\", + \"machine_id\": \"$machine_id\", + \"metadata\": { + \"vmid\": \"$vmid\", + \"proxmox_node\": \"$(hostname)\", + \"ip_address\": \"$ip_address\", + \"os_info\": \"$os_info\" + } + }" \ + "$PATCHMON_URL/api/v1/auto-enrollment/enroll" \ + -w "\n%{http_code}" 2>&1) + + http_code=$(echo "$response" | tail -n 1) + body=$(echo "$response" | sed '$d') + + if [[ "$http_code" == "201" ]]; then + api_id=$(echo "$body" | jq -r '.host.api_id' 2>/dev/null || echo "") + api_key=$(echo "$body" | jq -r '.host.api_key' 2>/dev/null || echo "") + + if [[ -z "$api_id" ]] || [[ -z "$api_key" ]]; then + error " Failed to parse API credentials from response" + fi + + info " โœ“ Host enrolled successfully: $api_id" + + # Ensure curl is installed in the container + info " Checking for curl in container..." 
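For orientation, a sketch of the kind of per-container probes the discovery loop above performs via `pct exec` (illustrative only, not the verbatim script code; the exact fallbacks differ slightly):

```bash
# Gather basic facts from inside a running container ($vmid is set by the loop).
hostname=$(timeout 5 pct exec "$vmid" -- hostname 2>/dev/null || echo "lxc-$vmid")
ip_address=$(timeout 5 pct exec "$vmid" -- hostname -I 2>/dev/null | awk '{print $1}')
os_info=$(timeout 5 pct exec "$vmid" -- cat /etc/os-release 2>/dev/null \
  | grep '^PRETTY_NAME=' | cut -d'"' -f2)
machine_id=$(timeout 5 pct exec "$vmid" -- cat /etc/machine-id 2>/dev/null \
  || echo "proxmox-lxc-$vmid-unknown")
```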
+ curl_check=$(timeout 10 pct exec "$vmid" -- bash -c "command -v curl >/dev/null 2>&1 && echo 'installed' || echo 'missing'" 2>/dev/null /dev/null 2>&1; then + export DEBIAN_FRONTEND=noninteractive + apt-get update -qq && apt-get install -y -qq curl + elif command -v yum >/dev/null 2>&1; then + yum install -y -q curl + elif command -v dnf >/dev/null 2>&1; then + dnf install -y -q curl + elif command -v apk >/dev/null 2>&1; then + apk add --no-cache curl + else + echo 'ERROR: No supported package manager found' + exit 1 + fi + " 2>&1 &1 180s) in $friendly_name" + info " Install output: $install_output" + # Store failure details + failed_containers["$vmid"]="$friendly_name|Timeout (>180s)|$install_output" + ((failed_count++)) || true + else + # Check if it's a dpkg error + if [[ "$install_output" == *"dpkg was interrupted"* ]] || [[ "$install_output" == *"dpkg --configure -a"* ]]; then + warn " โš  Failed due to dpkg error in $friendly_name (can be fixed)" + dpkg_error_containers["$vmid"]="$friendly_name:$api_id:$api_key" + # Store failure details + failed_containers["$vmid"]="$friendly_name|dpkg error|$install_output" + else + warn " โœ— Failed to install agent in $friendly_name (exit: $install_exit_code)" + # Store failure details + failed_containers["$vmid"]="$friendly_name|Exit code $install_exit_code|$install_output" + fi + info " Install output: $install_output" + ((failed_count++)) || true + fi + + elif [[ "$http_code" == "409" ]]; then + warn " โŠ˜ Host $friendly_name already enrolled - skipping" + ((skipped_count++)) || true + elif [[ "$http_code" == "429" ]]; then + error " โœ— Rate limit exceeded - maximum hosts per day reached" + failed_containers["$vmid"]="$friendly_name|Rate limit exceeded|$body" + ((failed_count++)) || true + else + error " โœ— Failed to enroll $friendly_name - HTTP $http_code" + debug " Response: $body" + failed_containers["$vmid"]="$friendly_name|HTTP $http_code enrollment failed|$body" + ((failed_count++)) || true + fi + + echo "" + sleep 1 # Rate limiting between containers + +done <<< "$lxc_list" + +# ===== SUMMARY ===== +echo "" +echo "โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—" +echo "โ•‘ ENROLLMENT SUMMARY โ•‘" +echo "โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•" +echo "" +info "Total Containers Found: $total_containers" +info "Successfully Enrolled: $enrolled_count" +info "Skipped: $skipped_count" +info "Failed: $failed_count" +echo "" + +# ===== FAILURE DETAILS ===== +if [[ ${#failed_containers[@]} -gt 0 ]]; then + echo "โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—" + echo "โ•‘ FAILURE DETAILS โ•‘" + echo "โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•" + echo "" + + for vmid in "${!failed_containers[@]}"; do + IFS='|' read -r name reason output <<< "${failed_containers[$vmid]}" + + warn "Container $vmid: $name" + info " Reason: $reason" + info " Last 5 lines of output:" + + # Get last 5 lines of output + last_5_lines=$(echo "$output" | tail -n 5) + + # 
Display each line with proper indentation + while IFS= read -r line; do + echo " $line" + done <<< "$last_5_lines" + + echo "" + done +fi + +if [[ "$DRY_RUN" == "true" ]]; then + warn "This was a DRY RUN - no actual changes were made" + warn "Set DRY_RUN=false to perform actual enrollment" +fi + +# ===== DPKG ERROR RECOVERY ===== +if [[ ${#dpkg_error_containers[@]} -gt 0 ]]; then + echo "" + echo "โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—" + echo "โ•‘ DPKG ERROR RECOVERY AVAILABLE โ•‘" + echo "โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•" + echo "" + warn "Detected ${#dpkg_error_containers[@]} container(s) with dpkg errors:" + for vmid in "${!dpkg_error_containers[@]}"; do + IFS=':' read -r name api_id api_key <<< "${dpkg_error_containers[$vmid]}" + info " โ€ข Container $vmid: $name" + done + echo "" + + # Ask user if they want to fix dpkg errors + read -p "Would you like to fix dpkg errors and retry installation? (y/N): " -n 1 -r + echo "" + + if [[ $REPLY =~ ^[Yy]$ ]]; then + echo "" + info "Starting dpkg recovery process..." + echo "" + + recovered_count=0 + + for vmid in "${!dpkg_error_containers[@]}"; do + IFS=':' read -r name api_id api_key <<< "${dpkg_error_containers[$vmid]}" + + info "Fixing dpkg in container $vmid ($name)..." + + # Run dpkg --configure -a + dpkg_output=$(timeout 60 pct exec "$vmid" -- dpkg --configure -a 2>&1 &1 { } // Verify token - const decoded = jwt.verify( - token, - process.env.JWT_SECRET || "your-secret-key", - ); + if (!process.env.JWT_SECRET) { + throw new Error("JWT_SECRET environment variable is required"); + } + const decoded = jwt.verify(token, process.env.JWT_SECRET); // Validate session and check inactivity timeout const validation = await validate_session(decoded.sessionId, token); @@ -85,10 +85,10 @@ const optionalAuth = async (req, _res, next) => { const token = authHeader?.split(" ")[1]; if (token) { - const decoded = jwt.verify( - token, - process.env.JWT_SECRET || "your-secret-key", - ); + if (!process.env.JWT_SECRET) { + throw new Error("JWT_SECRET environment variable is required"); + } + const decoded = jwt.verify(token, process.env.JWT_SECRET); const user = await prisma.users.findUnique({ where: { id: decoded.userId }, select: { diff --git a/backend/src/routes/authRoutes.js b/backend/src/routes/authRoutes.js index 9d04c58..6f68209 100644 --- a/backend/src/routes/authRoutes.js +++ b/backend/src/routes/authRoutes.js @@ -156,7 +156,10 @@ router.post( // Generate JWT token const generateToken = (userId) => { - return jwt.sign({ userId }, process.env.JWT_SECRET || "your-secret-key", { + if (!process.env.JWT_SECRET) { + throw new Error("JWT_SECRET environment variable is required"); + } + return jwt.sign({ userId }, process.env.JWT_SECRET, { expiresIn: process.env.JWT_EXPIRES_IN || "24h", }); }; diff --git a/backend/src/routes/autoEnrollmentRoutes.js b/backend/src/routes/autoEnrollmentRoutes.js new file mode 100644 index 0000000..4f7f056 --- /dev/null +++ b/backend/src/routes/autoEnrollmentRoutes.js @@ -0,0 +1,745 @@ +const express = require("express"); +const { PrismaClient } = require("@prisma/client"); +const crypto = require("node:crypto"); +const bcrypt = require("bcryptjs"); +const { body, validationResult } = require("express-validator"); 
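The middleware and `generateToken` changes above make `JWT_SECRET` mandatory: the `"your-secret-key"` fallback is gone, so the backend now fails fast when the variable is missing. A quick way to generate and wire in a strong value (the `.env` location is an assumption; use whatever mechanism feeds your backend's environment):

```bash
# Generate a random secret (96 hex characters)
openssl rand -hex 48

# Then expose it to the backend process, e.g. in backend/.env:
#   JWT_SECRET=<value printed above>
#   JWT_EXPIRES_IN=24h   # optional; authRoutes.js defaults to "24h"
```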
+const { authenticateToken } = require("../middleware/auth"); +const { requireManageSettings } = require("../middleware/permissions"); +const { v4: uuidv4 } = require("uuid"); + +const router = express.Router(); +const prisma = new PrismaClient(); + +// Generate auto-enrollment token credentials +const generate_auto_enrollment_token = () => { + const token_key = `patchmon_ae_${crypto.randomBytes(16).toString("hex")}`; + const token_secret = crypto.randomBytes(48).toString("hex"); + return { token_key, token_secret }; +}; + +// Middleware to validate auto-enrollment token +const validate_auto_enrollment_token = async (req, res, next) => { + try { + const token_key = req.headers["x-auto-enrollment-key"]; + const token_secret = req.headers["x-auto-enrollment-secret"]; + + if (!token_key || !token_secret) { + return res + .status(401) + .json({ error: "Auto-enrollment credentials required" }); + } + + // Find token + const token = await prisma.auto_enrollment_tokens.findUnique({ + where: { token_key: token_key }, + }); + + if (!token || !token.is_active) { + return res.status(401).json({ error: "Invalid or inactive token" }); + } + + // Verify secret (hashed) + const is_valid = await bcrypt.compare(token_secret, token.token_secret); + if (!is_valid) { + return res.status(401).json({ error: "Invalid token secret" }); + } + + // Check expiration + if (token.expires_at && new Date() > new Date(token.expires_at)) { + return res.status(401).json({ error: "Token expired" }); + } + + // Check IP whitelist if configured + if (token.allowed_ip_ranges && token.allowed_ip_ranges.length > 0) { + const client_ip = req.ip || req.connection.remoteAddress; + // Basic IP check - can be enhanced with CIDR matching + const ip_allowed = token.allowed_ip_ranges.some((allowed_ip) => { + return client_ip.includes(allowed_ip); + }); + + if (!ip_allowed) { + console.warn( + `Auto-enrollment attempt from unauthorized IP: ${client_ip}`, + ); + return res + .status(403) + .json({ error: "IP address not authorized for this token" }); + } + } + + // Check rate limit (hosts per day) + const today = new Date().toISOString().split("T")[0]; + const token_reset_date = token.last_reset_date.toISOString().split("T")[0]; + + if (token_reset_date !== today) { + // Reset daily counter + await prisma.auto_enrollment_tokens.update({ + where: { id: token.id }, + data: { + hosts_created_today: 0, + last_reset_date: new Date(), + updated_at: new Date(), + }, + }); + token.hosts_created_today = 0; + } + + if (token.hosts_created_today >= token.max_hosts_per_day) { + return res.status(429).json({ + error: "Rate limit exceeded", + message: `Maximum ${token.max_hosts_per_day} hosts per day allowed for this token`, + }); + } + + req.auto_enrollment_token = token; + next(); + } catch (error) { + console.error("Auto-enrollment token validation error:", error); + res.status(500).json({ error: "Token validation failed" }); + } +}; + +// ========== ADMIN ENDPOINTS (Manage Tokens) ========== + +// Create auto-enrollment token +router.post( + "/tokens", + authenticateToken, + requireManageSettings, + [ + body("token_name") + .isLength({ min: 1, max: 255 }) + .withMessage("Token name is required (max 255 characters)"), + body("allowed_ip_ranges") + .optional() + .isArray() + .withMessage("Allowed IP ranges must be an array"), + body("max_hosts_per_day") + .optional() + .isInt({ min: 1, max: 1000 }) + .withMessage("Max hosts per day must be between 1 and 1000"), + body("default_host_group_id") + .optional({ nullable: true, checkFalsy: true }) + 
.isString(), + body("expires_at") + .optional({ nullable: true, checkFalsy: true }) + .isISO8601() + .withMessage("Invalid date format"), + ], + async (req, res) => { + try { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const { + token_name, + allowed_ip_ranges = [], + max_hosts_per_day = 100, + default_host_group_id, + expires_at, + metadata = {}, + } = req.body; + + // Validate host group if provided + if (default_host_group_id) { + const host_group = await prisma.host_groups.findUnique({ + where: { id: default_host_group_id }, + }); + + if (!host_group) { + return res.status(400).json({ error: "Host group not found" }); + } + } + + const { token_key, token_secret } = generate_auto_enrollment_token(); + const hashed_secret = await bcrypt.hash(token_secret, 10); + + const token = await prisma.auto_enrollment_tokens.create({ + data: { + id: uuidv4(), + token_name, + token_key: token_key, + token_secret: hashed_secret, + created_by_user_id: req.user.id, + allowed_ip_ranges, + max_hosts_per_day, + default_host_group_id: default_host_group_id || null, + expires_at: expires_at ? new Date(expires_at) : null, + metadata: { integration_type: "proxmox-lxc", ...metadata }, + updated_at: new Date(), + }, + include: { + host_groups: { + select: { + id: true, + name: true, + color: true, + }, + }, + users: { + select: { + id: true, + username: true, + first_name: true, + last_name: true, + }, + }, + }, + }); + + // Return unhashed secret ONLY once (like API keys) + res.status(201).json({ + message: "Auto-enrollment token created successfully", + token: { + id: token.id, + token_name: token.token_name, + token_key: token_key, + token_secret: token_secret, // ONLY returned here! 
+ max_hosts_per_day: token.max_hosts_per_day, + default_host_group: token.host_groups, + created_by: token.users, + expires_at: token.expires_at, + }, + warning: "โš ๏ธ Save the token_secret now - it cannot be retrieved later!", + }); + } catch (error) { + console.error("Create auto-enrollment token error:", error); + res.status(500).json({ error: "Failed to create token" }); + } + }, +); + +// List auto-enrollment tokens +router.get( + "/tokens", + authenticateToken, + requireManageSettings, + async (_req, res) => { + try { + const tokens = await prisma.auto_enrollment_tokens.findMany({ + select: { + id: true, + token_name: true, + token_key: true, + is_active: true, + allowed_ip_ranges: true, + max_hosts_per_day: true, + hosts_created_today: true, + last_used_at: true, + expires_at: true, + created_at: true, + default_host_group_id: true, + metadata: true, + host_groups: { + select: { + id: true, + name: true, + color: true, + }, + }, + users: { + select: { + id: true, + username: true, + first_name: true, + last_name: true, + }, + }, + }, + orderBy: { created_at: "desc" }, + }); + + res.json(tokens); + } catch (error) { + console.error("List auto-enrollment tokens error:", error); + res.status(500).json({ error: "Failed to list tokens" }); + } + }, +); + +// Get single token details +router.get( + "/tokens/:tokenId", + authenticateToken, + requireManageSettings, + async (req, res) => { + try { + const { tokenId } = req.params; + + const token = await prisma.auto_enrollment_tokens.findUnique({ + where: { id: tokenId }, + include: { + host_groups: { + select: { + id: true, + name: true, + color: true, + }, + }, + users: { + select: { + id: true, + username: true, + first_name: true, + last_name: true, + }, + }, + }, + }); + + if (!token) { + return res.status(404).json({ error: "Token not found" }); + } + + // Don't include the secret in response + const { token_secret: _secret, ...token_data } = token; + + res.json(token_data); + } catch (error) { + console.error("Get token error:", error); + res.status(500).json({ error: "Failed to get token" }); + } + }, +); + +// Update token (toggle active state, update limits, etc.) 
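Tying the admin endpoints above together: a token can be minted from the command line as well as from the UI. A hedged example, assuming the router is mounted at `/api/v1/auto-enrollment` and `$JWT` holds a valid admin session token:

```bash
curl -s -X POST "https://patchmon.example.com/api/v1/auto-enrollment/tokens" \
  -H "Authorization: Bearer $JWT" \
  -H "Content-Type: application/json" \
  -d '{"token_name": "proxmox-cluster-1", "max_hosts_per_day": 100}' \
  | jq '{key: .token.token_key, secret: .token.token_secret}'
```

The `token_secret` in this response is the only time the plaintext secret is available; it is stored bcrypt-hashed thereafter.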
+router.patch( + "/tokens/:tokenId", + authenticateToken, + requireManageSettings, + [ + body("is_active").optional().isBoolean(), + body("max_hosts_per_day").optional().isInt({ min: 1, max: 1000 }), + body("allowed_ip_ranges").optional().isArray(), + body("expires_at").optional().isISO8601(), + ], + async (req, res) => { + try { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const { tokenId } = req.params; + const update_data = { updated_at: new Date() }; + + if (req.body.is_active !== undefined) + update_data.is_active = req.body.is_active; + if (req.body.max_hosts_per_day !== undefined) + update_data.max_hosts_per_day = req.body.max_hosts_per_day; + if (req.body.allowed_ip_ranges !== undefined) + update_data.allowed_ip_ranges = req.body.allowed_ip_ranges; + if (req.body.expires_at !== undefined) + update_data.expires_at = new Date(req.body.expires_at); + + const token = await prisma.auto_enrollment_tokens.update({ + where: { id: tokenId }, + data: update_data, + include: { + host_groups: true, + users: { + select: { + username: true, + first_name: true, + last_name: true, + }, + }, + }, + }); + + const { token_secret: _secret, ...token_data } = token; + + res.json({ + message: "Token updated successfully", + token: token_data, + }); + } catch (error) { + console.error("Update token error:", error); + res.status(500).json({ error: "Failed to update token" }); + } + }, +); + +// Delete token +router.delete( + "/tokens/:tokenId", + authenticateToken, + requireManageSettings, + async (req, res) => { + try { + const { tokenId } = req.params; + + const token = await prisma.auto_enrollment_tokens.findUnique({ + where: { id: tokenId }, + }); + + if (!token) { + return res.status(404).json({ error: "Token not found" }); + } + + await prisma.auto_enrollment_tokens.delete({ + where: { id: tokenId }, + }); + + res.json({ + message: "Auto-enrollment token deleted successfully", + deleted_token: { + id: token.id, + token_name: token.token_name, + }, + }); + } catch (error) { + console.error("Delete token error:", error); + res.status(500).json({ error: "Failed to delete token" }); + } + }, +); + +// ========== AUTO-ENROLLMENT ENDPOINTS (Used by Scripts) ========== +// Future integrations can follow this pattern: +// - /proxmox-lxc - Proxmox LXC containers +// - /vmware-esxi - VMware ESXi VMs +// - /docker - Docker containers +// - /kubernetes - Kubernetes pods +// - /aws-ec2 - AWS EC2 instances + +// Serve the Proxmox LXC enrollment script with credentials injected +router.get("/proxmox-lxc", async (req, res) => { + try { + // Get token from query params + const token_key = req.query.token_key; + const token_secret = req.query.token_secret; + + if (!token_key || !token_secret) { + return res + .status(401) + .json({ error: "Token key and secret required as query parameters" }); + } + + // Validate token + const token = await prisma.auto_enrollment_tokens.findUnique({ + where: { token_key: token_key }, + }); + + if (!token || !token.is_active) { + return res.status(401).json({ error: "Invalid or inactive token" }); + } + + // Verify secret + const is_valid = await bcrypt.compare(token_secret, token.token_secret); + if (!is_valid) { + return res.status(401).json({ error: "Invalid token secret" }); + } + + // Check expiration + if (token.expires_at && new Date() > new Date(token.expires_at)) { + return res.status(401).json({ error: "Token expired" }); + } + + const fs = require("node:fs"); + const path = 
require("node:path"); + + const script_path = path.join( + __dirname, + "../../../agents/proxmox_auto_enroll.sh", + ); + + if (!fs.existsSync(script_path)) { + return res + .status(404) + .json({ error: "Proxmox enrollment script not found" }); + } + + let script = fs.readFileSync(script_path, "utf8"); + + // Convert Windows line endings to Unix line endings + script = script.replace(/\r\n/g, "\n").replace(/\r/g, "\n"); + + // Get the configured server URL from settings + let server_url = "http://localhost:3001"; + try { + const settings = await prisma.settings.findFirst(); + if (settings?.server_url) { + server_url = settings.server_url; + } + } catch (settings_error) { + console.warn( + "Could not fetch settings, using default server URL:", + settings_error.message, + ); + } + + // Determine curl flags dynamically from settings + let curl_flags = "-s"; + try { + const settings = await prisma.settings.findFirst(); + if (settings && settings.ignore_ssl_self_signed === true) { + curl_flags = "-sk"; + } + } catch (_) {} + + // Check for --force parameter + const force_install = req.query.force === "true" || req.query.force === "1"; + + // Inject the token credentials, server URL, curl flags, and force flag into the script + const env_vars = `#!/bin/bash +# PatchMon Auto-Enrollment Configuration (Auto-generated) +export PATCHMON_URL="${server_url}" +export AUTO_ENROLLMENT_KEY="${token.token_key}" +export AUTO_ENROLLMENT_SECRET="${token_secret}" +export CURL_FLAGS="${curl_flags}" +export FORCE_INSTALL="${force_install ? "true" : "false"}" + +`; + + // Remove the shebang and configuration section from the original script + script = script.replace(/^#!/, "#"); + + // Remove the configuration section (between # ===== CONFIGURATION ===== and the next # =====) + script = script.replace( + /# ===== CONFIGURATION =====[\s\S]*?(?=# ===== COLOR OUTPUT =====)/, + "", + ); + + script = env_vars + script; + + res.setHeader("Content-Type", "text/plain"); + res.setHeader( + "Content-Disposition", + 'inline; filename="proxmox_auto_enroll.sh"', + ); + res.send(script); + } catch (error) { + console.error("Proxmox script serve error:", error); + res.status(500).json({ error: "Failed to serve enrollment script" }); + } +}); + +// Create host via auto-enrollment +router.post( + "/enroll", + validate_auto_enrollment_token, + [ + body("friendly_name") + .isLength({ min: 1, max: 255 }) + .withMessage("Friendly name is required"), + body("machine_id") + .isLength({ min: 1, max: 255 }) + .withMessage("Machine ID is required"), + body("metadata").optional().isObject(), + ], + async (req, res) => { + try { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const { friendly_name, machine_id } = req.body; + + // Generate host API credentials + const api_id = `patchmon_${crypto.randomBytes(8).toString("hex")}`; + const api_key = crypto.randomBytes(32).toString("hex"); + + // Check if host already exists by machine_id (not hostname) + const existing_host = await prisma.hosts.findUnique({ + where: { machine_id }, + }); + + if (existing_host) { + return res.status(409).json({ + error: "Host already exists", + host_id: existing_host.id, + api_id: existing_host.api_id, + machine_id: existing_host.machine_id, + friendly_name: existing_host.friendly_name, + message: + "This machine is already enrolled in PatchMon (matched by machine ID)", + }); + } + + // Create host + const host = await prisma.hosts.create({ + data: { + id: uuidv4(), + 
machine_id, + friendly_name, + os_type: "unknown", + os_version: "unknown", + api_id: api_id, + api_key: api_key, + host_group_id: req.auto_enrollment_token.default_host_group_id, + status: "pending", + notes: `Auto-enrolled via ${req.auto_enrollment_token.token_name} on ${new Date().toISOString()}`, + updated_at: new Date(), + }, + include: { + host_groups: { + select: { + id: true, + name: true, + color: true, + }, + }, + }, + }); + + // Update token usage stats + await prisma.auto_enrollment_tokens.update({ + where: { id: req.auto_enrollment_token.id }, + data: { + hosts_created_today: { increment: 1 }, + last_used_at: new Date(), + updated_at: new Date(), + }, + }); + + console.log( + `Auto-enrolled host: ${friendly_name} (${host.id}) via token: ${req.auto_enrollment_token.token_name}`, + ); + + res.status(201).json({ + message: "Host enrolled successfully", + host: { + id: host.id, + friendly_name: host.friendly_name, + api_id: api_id, + api_key: api_key, + host_group: host.host_groups, + status: host.status, + }, + }); + } catch (error) { + console.error("Auto-enrollment error:", error); + res.status(500).json({ error: "Failed to enroll host" }); + } + }, +); + +// Bulk enroll multiple hosts at once +router.post( + "/enroll/bulk", + validate_auto_enrollment_token, + [ + body("hosts") + .isArray({ min: 1, max: 50 }) + .withMessage("Hosts array required (max 50)"), + body("hosts.*.friendly_name") + .isLength({ min: 1 }) + .withMessage("Each host needs a friendly_name"), + ], + async (req, res) => { + try { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(400).json({ errors: errors.array() }); + } + + const { hosts } = req.body; + + // Check rate limit + const remaining_quota = + req.auto_enrollment_token.max_hosts_per_day - + req.auto_enrollment_token.hosts_created_today; + + if (hosts.length > remaining_quota) { + return res.status(429).json({ + error: "Rate limit exceeded", + message: `Only ${remaining_quota} hosts remaining in daily quota`, + }); + } + + const results = { + success: [], + failed: [], + skipped: [], + }; + + for (const host_data of hosts) { + try { + const { friendly_name, machine_id } = host_data; + + if (!machine_id) { + results.failed.push({ + friendly_name, + error: "Machine ID is required", + }); + continue; + } + + // Check if host already exists by machine_id + const existing_host = await prisma.hosts.findUnique({ + where: { machine_id }, + }); + + if (existing_host) { + results.skipped.push({ + friendly_name, + machine_id, + reason: "Machine already enrolled", + api_id: existing_host.api_id, + }); + continue; + } + + // Generate credentials + const api_id = `patchmon_${crypto.randomBytes(8).toString("hex")}`; + const api_key = crypto.randomBytes(32).toString("hex"); + + // Create host + const host = await prisma.hosts.create({ + data: { + id: uuidv4(), + machine_id, + friendly_name, + os_type: "unknown", + os_version: "unknown", + api_id: api_id, + api_key: api_key, + host_group_id: req.auto_enrollment_token.default_host_group_id, + status: "pending", + notes: `Auto-enrolled via ${req.auto_enrollment_token.token_name} on ${new Date().toISOString()}`, + updated_at: new Date(), + }, + }); + + results.success.push({ + id: host.id, + friendly_name: host.friendly_name, + api_id: api_id, + api_key: api_key, + }); + } catch (error) { + results.failed.push({ + friendly_name: host_data.friendly_name, + error: error.message, + }); + } + } + + // Update token usage stats + if (results.success.length > 0) { + await 
prisma.auto_enrollment_tokens.update({ + where: { id: req.auto_enrollment_token.id }, + data: { + hosts_created_today: { increment: results.success.length }, + last_used_at: new Date(), + updated_at: new Date(), + }, + }); + } + + res.status(201).json({ + message: `Bulk enrollment completed: ${results.success.length} succeeded, ${results.failed.length} failed, ${results.skipped.length} skipped`, + results, + }); + } catch (error) { + console.error("Bulk auto-enrollment error:", error); + res.status(500).json({ error: "Failed to bulk enroll hosts" }); + } + }, +); + +module.exports = router; diff --git a/backend/src/routes/dashboardRoutes.js b/backend/src/routes/dashboardRoutes.js index fb54447..5169873 100644 --- a/backend/src/routes/dashboardRoutes.js +++ b/backend/src/routes/dashboardRoutes.js @@ -185,6 +185,7 @@ router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => { // Show all hosts regardless of status select: { id: true, + machine_id: true, friendly_name: true, hostname: true, ip: true, diff --git a/backend/src/routes/hostRoutes.js b/backend/src/routes/hostRoutes.js index 842f286..5a076a5 100644 --- a/backend/src/routes/hostRoutes.js +++ b/backend/src/routes/hostRoutes.js @@ -172,15 +172,6 @@ router.post( // Generate unique API credentials for this host const { apiId, apiKey } = generateApiCredentials(); - // Check if host already exists - const existingHost = await prisma.hosts.findUnique({ - where: { friendly_name: friendly_name }, - }); - - if (existingHost) { - return res.status(409).json({ error: "Host already exists" }); - } - // If hostGroupId is provided, verify the group exists if (hostGroupId) { const hostGroup = await prisma.host_groups.findUnique({ @@ -196,6 +187,7 @@ router.post( const host = await prisma.hosts.create({ data: { id: uuidv4(), + machine_id: `pending-${uuidv4()}`, // Temporary placeholder until agent connects with real machine_id friendly_name: friendly_name, os_type: "unknown", // Will be updated when agent connects os_version: "unknown", // Will be updated when agent connects @@ -321,6 +313,10 @@ router.post( .optional() .isArray() .withMessage("Load average must be an array"), + body("machineId") + .optional() + .isString() + .withMessage("Machine ID must be a string"), ], async (req, res) => { try { @@ -338,6 +334,11 @@ router.post( updated_at: new Date(), }; + // Update machine_id if provided and current one is a placeholder + if (req.body.machineId && host.machine_id.startsWith("pending-")) { + updateData.machine_id = req.body.machineId; + } + // Basic system info if (req.body.osType) updateData.os_type = req.body.osType; if (req.body.osVersion) updateData.os_version = req.body.osVersion; @@ -1126,12 +1127,16 @@ router.get("/install", async (req, res) => { } } catch (_) {} - // Inject the API credentials, server URL, and curl flags into the script + // Check for --force parameter + const forceInstall = req.query.force === "true" || req.query.force === "1"; + + // Inject the API credentials, server URL, curl flags, and force flag into the script const envVars = `#!/bin/bash export PATCHMON_URL="${serverUrl}" export API_ID="${host.api_id}" export API_KEY="${host.api_key}" export CURL_FLAGS="${curlFlags}" +export FORCE_INSTALL="${forceInstall ? 
"true" : "false"}" `; @@ -1151,6 +1156,48 @@ export CURL_FLAGS="${curlFlags}" } }); +// Check if machine_id already exists (requires auth) +router.post("/check-machine-id", validateApiCredentials, async (req, res) => { + try { + const { machine_id } = req.body; + + if (!machine_id) { + return res.status(400).json({ + error: "machine_id is required", + }); + } + + // Check if a host with this machine_id exists + const existing_host = await prisma.hosts.findUnique({ + where: { machine_id }, + select: { + id: true, + friendly_name: true, + machine_id: true, + api_id: true, + status: true, + created_at: true, + }, + }); + + if (existing_host) { + return res.status(200).json({ + exists: true, + host: existing_host, + message: "This machine is already enrolled", + }); + } + + return res.status(200).json({ + exists: false, + message: "Machine not yet enrolled", + }); + } catch (error) { + console.error("Error checking machine_id:", error); + res.status(500).json({ error: "Failed to check machine_id" }); + } +}); + // Serve the removal script (public endpoint - no authentication required) router.get("/remove", async (_req, res) => { try { diff --git a/backend/src/routes/packageRoutes.js b/backend/src/routes/packageRoutes.js index e611bd3..ee2186f 100644 --- a/backend/src/routes/packageRoutes.js +++ b/backend/src/routes/packageRoutes.js @@ -67,7 +67,9 @@ router.get("/", async (req, res) => { latest_version: true, created_at: true, _count: { - host_packages: true, + select: { + host_packages: true, + }, }, }, skip, @@ -82,7 +84,7 @@ router.get("/", async (req, res) => { // Get additional stats for each package const packagesWithStats = await Promise.all( packages.map(async (pkg) => { - const [updatesCount, securityCount, affectedHosts] = await Promise.all([ + const [updatesCount, securityCount, packageHosts] = await Promise.all([ prisma.host_packages.count({ where: { package_id: pkg.id, @@ -117,17 +119,18 @@ router.get("/", async (req, res) => { return { ...pkg, - affectedHostsCount: pkg._count.hostPackages, - affectedHosts: affectedHosts.map((hp) => ({ - hostId: hp.host.id, - friendlyName: hp.host.friendly_name, - osType: hp.host.os_type, + packageHostsCount: pkg._count.host_packages, + packageHosts: packageHosts.map((hp) => ({ + hostId: hp.hosts.id, + friendlyName: hp.hosts.friendly_name, + osType: hp.hosts.os_type, currentVersion: hp.current_version, availableVersion: hp.available_version, + needsUpdate: hp.needs_update, isSecurityUpdate: hp.is_security_update, })), stats: { - totalInstalls: pkg._count.hostPackages, + totalInstalls: pkg._count.host_packages, updatesNeeded: updatesCount, securityUpdates: securityCount, }, @@ -160,19 +163,19 @@ router.get("/:packageId", async (req, res) => { include: { host_packages: { include: { - host: { + hosts: { select: { id: true, hostname: true, ip: true, - osType: true, - osVersion: true, - lastUpdate: true, + os_type: true, + os_version: true, + last_update: true, }, }, }, orderBy: { - needsUpdate: "desc", + needs_update: "desc", }, }, }, @@ -185,25 +188,25 @@ router.get("/:packageId", async (req, res) => { // Calculate statistics const stats = { totalInstalls: packageData.host_packages.length, - updatesNeeded: packageData.host_packages.filter((hp) => hp.needsUpdate) + updatesNeeded: packageData.host_packages.filter((hp) => hp.needs_update) .length, securityUpdates: packageData.host_packages.filter( - (hp) => hp.needsUpdate && hp.isSecurityUpdate, + (hp) => hp.needs_update && hp.is_security_update, ).length, - upToDate: 
packageData.host_packages.filter((hp) => !hp.needsUpdate) + upToDate: packageData.host_packages.filter((hp) => !hp.needs_update) .length, }; // Group by version const versionDistribution = packageData.host_packages.reduce((acc, hp) => { - const version = hp.currentVersion; + const version = hp.current_version; acc[version] = (acc[version] || 0) + 1; return acc; }, {}); // Group by OS type const osDistribution = packageData.host_packages.reduce((acc, hp) => { - const osType = hp.host.osType; + const osType = hp.hosts.os_type; acc[osType] = (acc[osType] || 0) + 1; return acc; }, {}); @@ -230,4 +233,109 @@ router.get("/:packageId", async (req, res) => { } }); +// Get hosts where a package is installed +router.get("/:packageId/hosts", async (req, res) => { + try { + const { packageId } = req.params; + const { + page = 1, + limit = 25, + search = "", + sortBy = "friendly_name", + sortOrder = "asc", + } = req.query; + + const offset = (parseInt(page, 10) - 1) * parseInt(limit, 10); + + // Build search conditions + const searchConditions = search + ? { + OR: [ + { + hosts: { + friendly_name: { contains: search, mode: "insensitive" }, + }, + }, + { hosts: { hostname: { contains: search, mode: "insensitive" } } }, + { current_version: { contains: search, mode: "insensitive" } }, + { available_version: { contains: search, mode: "insensitive" } }, + ], + } + : {}; + + // Build sort conditions + const orderBy = {}; + if ( + sortBy === "friendly_name" || + sortBy === "hostname" || + sortBy === "os_type" + ) { + orderBy.hosts = { [sortBy]: sortOrder }; + } else if (sortBy === "needs_update") { + orderBy[sortBy] = sortOrder; + } else { + orderBy[sortBy] = sortOrder; + } + + // Get total count + const totalCount = await prisma.host_packages.count({ + where: { + package_id: packageId, + ...searchConditions, + }, + }); + + // Get paginated results + const hostPackages = await prisma.host_packages.findMany({ + where: { + package_id: packageId, + ...searchConditions, + }, + include: { + hosts: { + select: { + id: true, + friendly_name: true, + hostname: true, + os_type: true, + os_version: true, + last_update: true, + }, + }, + }, + orderBy, + skip: offset, + take: parseInt(limit, 10), + }); + + // Transform the data for the frontend + const hosts = hostPackages.map((hp) => ({ + hostId: hp.hosts.id, + friendlyName: hp.hosts.friendly_name, + hostname: hp.hosts.hostname, + osType: hp.hosts.os_type, + osVersion: hp.hosts.os_version, + lastUpdate: hp.hosts.last_update, + currentVersion: hp.current_version, + availableVersion: hp.available_version, + needsUpdate: hp.needs_update, + isSecurityUpdate: hp.is_security_update, + lastChecked: hp.last_checked, + })); + + res.json({ + hosts, + pagination: { + page: parseInt(page, 10), + limit: parseInt(limit, 10), + total: totalCount, + pages: Math.ceil(totalCount / parseInt(limit, 10)), + }, + }); + } catch (error) { + console.error("Error fetching package hosts:", error); + res.status(500).json({ error: "Failed to fetch package hosts" }); + } +}); + module.exports = router; diff --git a/backend/src/routes/repositoryRoutes.js b/backend/src/routes/repositoryRoutes.js index 6763d18..a1c0b10 100644 --- a/backend/src/routes/repositoryRoutes.js +++ b/backend/src/routes/repositoryRoutes.js @@ -289,6 +289,77 @@ router.get( }, ); +// Delete a specific repository (admin only) +router.delete( + "/:repositoryId", + authenticateToken, + requireManageHosts, + async (req, res) => { + try { + const { repositoryId } = req.params; + + // Check if repository exists first + const 
existingRepository = await prisma.repositories.findUnique({ + where: { id: repositoryId }, + select: { + id: true, + name: true, + url: true, + _count: { + select: { + host_repositories: true, + }, + }, + }, + }); + + if (!existingRepository) { + return res.status(404).json({ + error: "Repository not found", + details: "The repository may have been deleted or does not exist", + }); + } + + // Delete repository and all related data (cascade will handle host_repositories) + await prisma.repositories.delete({ + where: { id: repositoryId }, + }); + + res.json({ + message: "Repository deleted successfully", + deletedRepository: { + id: existingRepository.id, + name: existingRepository.name, + url: existingRepository.url, + hostCount: existingRepository._count.host_repositories, + }, + }); + } catch (error) { + console.error("Repository deletion error:", error); + + // Handle specific Prisma errors + if (error.code === "P2025") { + return res.status(404).json({ + error: "Repository not found", + details: "The repository may have been deleted or does not exist", + }); + } + + if (error.code === "P2003") { + return res.status(400).json({ + error: "Cannot delete repository due to foreign key constraints", + details: "The repository has related data that prevents deletion", + }); + } + + res.status(500).json({ + error: "Failed to delete repository", + details: error.message || "An unexpected error occurred", + }); + } + }, +); + // Cleanup orphaned repositories (admin only) router.delete( "/cleanup/orphaned", diff --git a/backend/src/routes/searchRoutes.js b/backend/src/routes/searchRoutes.js new file mode 100644 index 0000000..456dde4 --- /dev/null +++ b/backend/src/routes/searchRoutes.js @@ -0,0 +1,249 @@ +const express = require("express"); +const router = express.Router(); +const { createPrismaClient } = require("../config/database"); +const { authenticateToken } = require("../middleware/auth"); + +const prisma = createPrismaClient(); + +/** + * Global search endpoint + * Searches across hosts, packages, repositories, and users + * Returns categorized results + */ +router.get("/", authenticateToken, async (req, res) => { + try { + const { q } = req.query; + + if (!q || q.trim().length === 0) { + return res.json({ + hosts: [], + packages: [], + repositories: [], + users: [], + }); + } + + const searchTerm = q.trim(); + + // Prepare results object + const results = { + hosts: [], + packages: [], + repositories: [], + users: [], + }; + + // Get user permissions from database + let userPermissions = null; + try { + userPermissions = await prisma.role_permissions.findUnique({ + where: { role: req.user.role }, + }); + + // If no specific permissions found, default to admin permissions + if (!userPermissions) { + console.warn( + `No permissions found for role: ${req.user.role}, defaulting to admin access`, + ); + userPermissions = { + can_view_hosts: true, + can_view_packages: true, + can_view_users: true, + }; + } + } catch (permError) { + console.error("Error fetching permissions:", permError); + // Default to restrictive permissions on error + userPermissions = { + can_view_hosts: false, + can_view_packages: false, + can_view_users: false, + }; + } + + // Search hosts if user has permission + if (userPermissions.can_view_hosts) { + try { + const hosts = await prisma.hosts.findMany({ + where: { + OR: [ + { hostname: { contains: searchTerm, mode: "insensitive" } }, + { friendly_name: { contains: searchTerm, mode: "insensitive" } }, + { ip: { contains: searchTerm, mode: "insensitive" } }, + { 
machine_id: { contains: searchTerm, mode: "insensitive" } }, + ], + }, + select: { + id: true, + machine_id: true, + hostname: true, + friendly_name: true, + ip: true, + os_type: true, + os_version: true, + status: true, + last_update: true, + }, + take: 10, // Limit results + orderBy: { + last_update: "desc", + }, + }); + + results.hosts = hosts.map((host) => ({ + id: host.id, + hostname: host.hostname, + friendly_name: host.friendly_name, + ip: host.ip, + os_type: host.os_type, + os_version: host.os_version, + status: host.status, + last_update: host.last_update, + type: "host", + })); + } catch (error) { + console.error("Error searching hosts:", error); + } + } + + // Search packages if user has permission + if (userPermissions.can_view_packages) { + try { + const packages = await prisma.packages.findMany({ + where: { + name: { contains: searchTerm, mode: "insensitive" }, + }, + select: { + id: true, + name: true, + description: true, + category: true, + latest_version: true, + _count: { + select: { + host_packages: true, + }, + }, + }, + take: 10, + orderBy: { + name: "asc", + }, + }); + + results.packages = packages.map((pkg) => ({ + id: pkg.id, + name: pkg.name, + description: pkg.description, + category: pkg.category, + latest_version: pkg.latest_version, + host_count: pkg._count.host_packages, + type: "package", + })); + } catch (error) { + console.error("Error searching packages:", error); + } + } + + // Search repositories if user has permission (usually same as hosts) + if (userPermissions.can_view_hosts) { + try { + const repositories = await prisma.repositories.findMany({ + where: { + OR: [ + { name: { contains: searchTerm, mode: "insensitive" } }, + { url: { contains: searchTerm, mode: "insensitive" } }, + { description: { contains: searchTerm, mode: "insensitive" } }, + ], + }, + select: { + id: true, + name: true, + url: true, + distribution: true, + repo_type: true, + is_active: true, + description: true, + _count: { + select: { + host_repositories: true, + }, + }, + }, + take: 10, + orderBy: { + name: "asc", + }, + }); + + results.repositories = repositories.map((repo) => ({ + id: repo.id, + name: repo.name, + url: repo.url, + distribution: repo.distribution, + repo_type: repo.repo_type, + is_active: repo.is_active, + description: repo.description, + host_count: repo._count.host_repositories, + type: "repository", + })); + } catch (error) { + console.error("Error searching repositories:", error); + } + } + + // Search users if user has permission + if (userPermissions.can_view_users) { + try { + const users = await prisma.users.findMany({ + where: { + OR: [ + { username: { contains: searchTerm, mode: "insensitive" } }, + { email: { contains: searchTerm, mode: "insensitive" } }, + { first_name: { contains: searchTerm, mode: "insensitive" } }, + { last_name: { contains: searchTerm, mode: "insensitive" } }, + ], + }, + select: { + id: true, + username: true, + email: true, + first_name: true, + last_name: true, + role: true, + is_active: true, + last_login: true, + }, + take: 10, + orderBy: { + username: "asc", + }, + }); + + results.users = users.map((user) => ({ + id: user.id, + username: user.username, + email: user.email, + first_name: user.first_name, + last_name: user.last_name, + role: user.role, + is_active: user.is_active, + last_login: user.last_login, + type: "user", + })); + } catch (error) { + console.error("Error searching users:", error); + } + } + + res.json(results); + } catch (error) { + console.error("Global search error:", error); + res.status(500).json({ 
+ error: "Failed to perform search", + message: error.message, + }); + } +}); + +module.exports = router; diff --git a/backend/src/routes/settingsRoutes.js b/backend/src/routes/settingsRoutes.js index 0474e6d..965bcdb 100644 --- a/backend/src/routes/settingsRoutes.js +++ b/backend/src/routes/settingsRoutes.js @@ -215,6 +215,18 @@ router.put( } return true; }), + body("logoDark") + .optional() + .isLength({ min: 1 }) + .withMessage("Logo dark path must be a non-empty string"), + body("logoLight") + .optional() + .isLength({ min: 1 }) + .withMessage("Logo light path must be a non-empty string"), + body("favicon") + .optional() + .isLength({ min: 1 }) + .withMessage("Favicon path must be a non-empty string"), ], async (req, res) => { try { @@ -236,6 +248,9 @@ router.put( githubRepoUrl, repositoryType, sshKeyPath, + logoDark, + logoLight, + favicon, } = req.body; // Get current settings to check for update interval changes @@ -264,6 +279,9 @@ router.put( if (repositoryType !== undefined) updateData.repository_type = repositoryType; if (sshKeyPath !== undefined) updateData.ssh_key_path = sshKeyPath; + if (logoDark !== undefined) updateData.logo_dark = logoDark; + if (logoLight !== undefined) updateData.logo_light = logoLight; + if (favicon !== undefined) updateData.favicon = favicon; const updatedSettings = await updateSettings( currentSettings.id, @@ -351,4 +369,175 @@ router.get("/auto-update", async (_req, res) => { } }); +// Upload logo files +router.post( + "/logos/upload", + authenticateToken, + requireManageSettings, + async (req, res) => { + try { + const { logoType, fileContent, fileName } = req.body; + + if (!logoType || !fileContent) { + return res.status(400).json({ + error: "Logo type and file content are required", + }); + } + + if (!["dark", "light", "favicon"].includes(logoType)) { + return res.status(400).json({ + error: "Logo type must be 'dark', 'light', or 'favicon'", + }); + } + + // Validate file content (basic checks) + if (typeof fileContent !== "string") { + return res.status(400).json({ + error: "File content must be a base64 string", + }); + } + + const fs = require("node:fs").promises; + const path = require("node:path"); + const _crypto = require("node:crypto"); + + // Create assets directory if it doesn't exist + // In development: save to public/assets (served by Vite) + // In production: save to dist/assets (served by built app) + const isDevelopment = process.env.NODE_ENV !== "production"; + const assetsDir = isDevelopment + ? 
path.join(__dirname, "../../../frontend/public/assets") + : path.join(__dirname, "../../../frontend/dist/assets"); + await fs.mkdir(assetsDir, { recursive: true }); + + // Determine file extension and path + let fileExtension; + let fileName_final; + + if (logoType === "favicon") { + fileExtension = ".svg"; + fileName_final = fileName || "logo_square.svg"; + } else { + // Determine extension from file content or use default + if (fileContent.startsWith("data:image/png")) { + fileExtension = ".png"; + } else if (fileContent.startsWith("data:image/svg")) { + fileExtension = ".svg"; + } else if ( + fileContent.startsWith("data:image/jpeg") || + fileContent.startsWith("data:image/jpg") + ) { + fileExtension = ".jpg"; + } else { + fileExtension = ".png"; // Default to PNG + } + fileName_final = fileName || `logo_${logoType}${fileExtension}`; + } + + const filePath = path.join(assetsDir, fileName_final); + + // Handle base64 data URLs + let fileBuffer; + if (fileContent.startsWith("data:")) { + const base64Data = fileContent.split(",")[1]; + fileBuffer = Buffer.from(base64Data, "base64"); + } else { + // Assume it's already base64 + fileBuffer = Buffer.from(fileContent, "base64"); + } + + // Create backup of existing file + try { + const backupPath = `${filePath}.backup.${Date.now()}`; + await fs.copyFile(filePath, backupPath); + console.log(`Created backup: ${backupPath}`); + } catch (error) { + // Ignore if original doesn't exist + if (error.code !== "ENOENT") { + console.warn("Failed to create backup:", error.message); + } + } + + // Write new logo file + await fs.writeFile(filePath, fileBuffer); + + // Update settings with new logo path + const settings = await getSettings(); + const logoPath = `/assets/${fileName_final}`; + + const updateData = {}; + if (logoType === "dark") { + updateData.logo_dark = logoPath; + } else if (logoType === "light") { + updateData.logo_light = logoPath; + } else if (logoType === "favicon") { + updateData.favicon = logoPath; + } + + await updateSettings(settings.id, updateData); + + // Get file stats + const stats = await fs.stat(filePath); + + res.json({ + message: `${logoType} logo uploaded successfully`, + fileName: fileName_final, + path: logoPath, + size: stats.size, + sizeFormatted: `${(stats.size / 1024).toFixed(1)} KB`, + }); + } catch (error) { + console.error("Upload logo error:", error); + res.status(500).json({ error: "Failed to upload logo" }); + } + }, +); + +// Reset logo to default +router.post( + "/logos/reset", + authenticateToken, + requireManageSettings, + async (req, res) => { + try { + const { logoType } = req.body; + + if (!logoType) { + return res.status(400).json({ + error: "Logo type is required", + }); + } + + if (!["dark", "light", "favicon"].includes(logoType)) { + return res.status(400).json({ + error: "Logo type must be 'dark', 'light', or 'favicon'", + }); + } + + // Get current settings + const settings = await getSettings(); + + // Clear the custom logo path to revert to default + const updateData = {}; + if (logoType === "dark") { + updateData.logo_dark = null; + } else if (logoType === "light") { + updateData.logo_light = null; + } else if (logoType === "favicon") { + updateData.favicon = null; + } + + await updateSettings(settings.id, updateData); + + res.json({ + message: `${logoType} logo reset to default successfully`, + logoType, + }); + } catch (error) { + console.error("Reset logo error:", error); + res.status(500).json({ error: "Failed to reset logo" }); + } + }, +); + module.exports = router; diff --git 
a/backend/src/routes/versionRoutes.js b/backend/src/routes/versionRoutes.js
index 1ce0f20..81b82c9 100644
--- a/backend/src/routes/versionRoutes.js
+++ b/backend/src/routes/versionRoutes.js
@@ -2,36 +2,211 @@ const express = require("express");
 const { authenticateToken } = require("../middleware/auth");
 const { requireManageSettings } = require("../middleware/permissions");
 const { PrismaClient } = require("@prisma/client");
-const { exec } = require("node:child_process");
-const { promisify } = require("node:util");
 
 const prisma = new PrismaClient();
-const execAsync = promisify(exec);
+
+// Default GitHub repository URL
+const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
 
 const router = express.Router();
 
+// Helper function to get current version from package.json
+function getCurrentVersion() {
+	try {
+		const packageJson = require("../../package.json");
+		return packageJson?.version || "1.2.7";
+	} catch (packageError) {
+		console.warn(
+			"Could not read version from package.json, using fallback:",
+			packageError.message,
+		);
+		return "1.2.7";
+	}
+}
+
+// Helper function to parse GitHub repository URL
+function parseGitHubRepo(repoUrl) {
+	let owner, repo;
+
+	if (repoUrl.includes("git@github.com:")) {
+		const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
+		if (match) {
+			[, owner, repo] = match;
+		}
+	} else if (repoUrl.includes("github.com/")) {
+		const match = repoUrl.match(/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/);
+		if (match) {
+			[, owner, repo] = match;
+		}
+	}
+
+	return { owner, repo };
+}
+
+// Helper function to get latest release from GitHub API
+async function getLatestRelease(owner, repo) {
+	try {
+		const currentVersion = getCurrentVersion();
+		const apiUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
+
+		const response = await fetch(apiUrl, {
+			method: "GET",
+			headers: {
+				Accept: "application/vnd.github.v3+json",
+				"User-Agent": `PatchMon-Server/${currentVersion}`,
+			},
+		});
+
+		if (!response.ok) {
+			const errorText = await response.text();
+			if (
+				errorText.includes("rate limit") ||
+				errorText.includes("API rate limit")
+			) {
+				throw new Error("GitHub API rate limit exceeded");
+			}
+			throw new Error(
+				`GitHub API error: ${response.status} ${response.statusText}`,
+			);
+		}
+
+		const releaseData = await response.json();
+		return {
+			tagName: releaseData.tag_name,
+			version: releaseData.tag_name.replace("v", ""),
+			publishedAt: releaseData.published_at,
+			htmlUrl: releaseData.html_url,
+		};
+	} catch (error) {
+		console.error("Error fetching latest release:", error.message);
+		throw error; // Re-throw to be caught by the calling function
+	}
+}
+
+// Helper function to get latest commit from main branch
+async function getLatestCommit(owner, repo) {
+	try {
+		const currentVersion = getCurrentVersion();
+		const apiUrl = `https://api.github.com/repos/${owner}/${repo}/commits/main`;
+
+		const response = await fetch(apiUrl, {
+			method: "GET",
+			headers: {
+				Accept: "application/vnd.github.v3+json",
+				"User-Agent": `PatchMon-Server/${currentVersion}`,
+			},
+		});
+
+		if (!response.ok) {
+			const errorText = await response.text();
+			if (
+				errorText.includes("rate limit") ||
+				errorText.includes("API rate limit")
+			) {
+				throw new Error("GitHub API rate limit exceeded");
+			}
+			throw new Error(
+				`GitHub API error: ${response.status} ${response.statusText}`,
+			);
+		}
+
+		const commitData = await response.json();
+		return {
+			sha: commitData.sha,
+			message: commitData.commit.message,
+			author: commitData.commit.author.name,
+			date: commitData.commit.author.date,
+			htmlUrl: commitData.html_url,
+		};
+	} catch (error) {
+		console.error("Error fetching latest commit:", error.message);
+		throw error; // Re-throw to be caught by the calling function
+	}
+}
+
+// Helper function to get commit count difference
+async function getCommitDifference(owner, repo, currentVersion) {
+	try {
+		const currentVersionTag = `v${currentVersion}`;
+		// Compare main branch with the released version tag
+		const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${currentVersionTag}...main`;
+
+		const response = await fetch(apiUrl, {
+			method: "GET",
+			headers: {
+				Accept: "application/vnd.github.v3+json",
+				"User-Agent": `PatchMon-Server/${getCurrentVersion()}`,
+			},
+		});
+
+		if (!response.ok) {
+			const errorText = await response.text();
+			if (
+				errorText.includes("rate limit") ||
+				errorText.includes("API rate limit")
+			) {
+				throw new Error("GitHub API rate limit exceeded");
+			}
+			throw new Error(
+				`GitHub API error: ${response.status} ${response.statusText}`,
+			);
+		}
+
+		const compareData = await response.json();
+		return {
+			commitsBehind: compareData.behind_by || 0, // How many commits main is behind release
+			commitsAhead: compareData.ahead_by || 0, // How many commits main is ahead of release
+			totalCommits: compareData.total_commits || 0,
+			branchInfo: "main branch vs release",
+		};
+	} catch (error) {
+		console.error("Error fetching commit difference:", error.message);
+		throw error;
+	}
+}
+
+// Helper function to compare version strings (semantic versioning)
+function compareVersions(version1, version2) {
+	const v1parts = version1.split(".").map(Number);
+	const v2parts = version2.split(".").map(Number);
+
+	const maxLength = Math.max(v1parts.length, v2parts.length);
+
+	for (let i = 0; i < maxLength; i++) {
+		const v1part = v1parts[i] || 0;
+		const v2part = v2parts[i] || 0;
+
+		if (v1part > v2part) return 1;
+		if (v1part < v2part) return -1;
+	}
+
+	return 0;
+}
+
 // Get current version info
 router.get("/current", authenticateToken, async (_req, res) => {
 	try {
-		// Read version from package.json dynamically
-		let currentVersion = "1.2.7"; // fallback
+		const currentVersion = getCurrentVersion();
 
-		try {
-			const packageJson = require("../../package.json");
-			if (packageJson?.version) {
-				currentVersion = packageJson.version;
-			}
-		} catch (packageError) {
-			console.warn(
-				"Could not read version from package.json, using fallback:",
-				packageError.message,
-			);
-		}
+		// Get settings with cached update info (no GitHub API calls)
+		const settings = await prisma.settings.findFirst();
+		const githubRepoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
+		const { owner, repo } = parseGitHubRepo(githubRepoUrl);
 
+		// Return current version and cached update information
+		// The backend scheduler updates this data periodically
 		res.json({
 			version: currentVersion,
+			latest_version: settings?.latest_version || null,
+			// update_available is the settings column written by the update scheduler
+			is_update_available: settings?.update_available || false,
+			last_update_check: settings?.last_update_check || null,
 			buildDate: new Date().toISOString(),
 			environment: process.env.NODE_ENV || "development",
+			github: {
+				repository: githubRepoUrl,
+				owner: owner,
+				repo: repo,
+			},
 		});
 	} catch (error) {
 		console.error("Error getting current version:", error);
@@ -44,119 +219,11 @@ router.post(
 	"/test-ssh-key",
 	authenticateToken,
 	requireManageSettings,
-	async (req, res) => {
-		try {
-			const { sshKeyPath, githubRepoUrl } = req.body;
-
-			if (!sshKeyPath ||
!githubRepoUrl) { - return res.status(400).json({ - error: "SSH key path and GitHub repo URL are required", - }); - } - - // Parse repository info - let owner, repo; - if (githubRepoUrl.includes("git@github.com:")) { - const match = githubRepoUrl.match( - /git@github\.com:([^/]+)\/([^/]+)\.git/, - ); - if (match) { - [, owner, repo] = match; - } - } else if (githubRepoUrl.includes("github.com/")) { - const match = githubRepoUrl.match(/github\.com\/([^/]+)\/([^/]+)/); - if (match) { - [, owner, repo] = match; - } - } - - if (!owner || !repo) { - return res.status(400).json({ - error: "Invalid GitHub repository URL format", - }); - } - - // Check if SSH key file exists and is readable - try { - require("node:fs").accessSync(sshKeyPath); - } catch { - return res.status(400).json({ - error: "SSH key file not found or not accessible", - details: `Cannot access: ${sshKeyPath}`, - suggestion: - "Check the file path and ensure the application has read permissions", - }); - } - - // Test SSH connection to GitHub - const sshRepoUrl = `git@github.com:${owner}/${repo}.git`; - const env = { - ...process.env, - GIT_SSH_COMMAND: `ssh -i ${sshKeyPath} -o StrictHostKeyChecking=no -o IdentitiesOnly=yes -o ConnectTimeout=10`, - }; - - try { - // Test with a simple git command - const { stdout } = await execAsync( - `git ls-remote --heads ${sshRepoUrl} | head -n 1`, - { - timeout: 15000, - env: env, - }, - ); - - if (stdout.trim()) { - return res.json({ - success: true, - message: "SSH key is working correctly", - details: { - sshKeyPath, - repository: `${owner}/${repo}`, - testResult: "Successfully connected to GitHub", - }, - }); - } else { - return res.status(400).json({ - error: "SSH connection succeeded but no data returned", - suggestion: "Check repository access permissions", - }); - } - } catch (sshError) { - console.error("SSH test error:", sshError.message); - - if (sshError.message.includes("Permission denied")) { - return res.status(403).json({ - error: "SSH key permission denied", - details: "The SSH key exists but GitHub rejected the connection", - suggestion: - "Verify the SSH key is added to the repository as a deploy key with read access", - }); - } else if (sshError.message.includes("Host key verification failed")) { - return res.status(403).json({ - error: "Host key verification failed", - suggestion: - "This is normal for first-time connections. The key will be added to known_hosts automatically.", - }); - } else if (sshError.message.includes("Connection timed out")) { - return res.status(408).json({ - error: "Connection timed out", - suggestion: "Check your internet connection and GitHub status", - }); - } else { - return res.status(500).json({ - error: "SSH connection failed", - details: sshError.message, - suggestion: "Check the SSH key format and repository URL", - }); - } - } - } catch (error) { - console.error("SSH key test error:", error); - res.status(500).json({ - error: "Failed to test SSH key", - details: error.message, - }); - } + async (_req, res) => { + res.status(410).json({ + error: + "SSH key testing has been removed. 
Using default public repository.",
+		});
 	},
 );
@@ -174,24 +241,90 @@ router.get(
 			return res.status(400).json({ error: "Settings not found" });
 		}
 
-		const currentVersion = "1.2.7";
-		const latestVersion = settings.latest_version || currentVersion;
-		const isUpdateAvailable = settings.update_available || false;
-		const lastUpdateCheck = settings.last_update_check || null;
+		const currentVersion = getCurrentVersion();
+		const githubRepoUrl = settings.githubRepoUrl || DEFAULT_GITHUB_REPO;
+		const { owner, repo } = parseGitHubRepo(githubRepoUrl);
+
+		let latestRelease = null;
+		let latestCommit = null;
+		let commitDifference = null;
+
+		// Fetch fresh GitHub data if we have valid owner/repo
+		if (owner && repo) {
+			try {
+				const [releaseData, commitData, differenceData] = await Promise.all([
+					getLatestRelease(owner, repo),
+					getLatestCommit(owner, repo),
+					getCommitDifference(owner, repo, currentVersion),
+				]);
+
+				latestRelease = releaseData;
+				latestCommit = commitData;
+				commitDifference = differenceData;
+			} catch (githubError) {
+				console.warn(
+					"Failed to fetch fresh GitHub data:",
+					githubError.message,
+				);
+
+				// Provide fallback data when GitHub API is rate-limited
+				if (
+					githubError.message.includes("rate limit") ||
+					githubError.message.includes("API rate limit")
+				) {
+					console.log("GitHub API rate limited, providing fallback data");
+					latestRelease = {
+						tagName: "v1.2.7",
+						version: "1.2.7",
+						publishedAt: "2025-10-02T17:12:53Z",
+						htmlUrl:
+							"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.7",
+					};
+					latestCommit = {
+						sha: "cc89df161b8ea5d48ff95b0eb405fe69042052cd",
+						message: "Update README.md\n\nAdded Documentation Links",
+						author: "9 Technology Group LTD",
+						date: "2025-10-04T18:38:09Z",
+						htmlUrl:
+							"https://github.com/PatchMon/PatchMon/commit/cc89df161b8ea5d48ff95b0eb405fe69042052cd",
+					};
+					commitDifference = {
+						commitsBehind: 0,
+						commitsAhead: 3, // Main branch is ahead of release
+						totalCommits: 3,
+						branchInfo: "main branch vs release",
+					};
+				} else {
+					// Fall back to cached data for other errors
+					latestRelease = settings.latest_version
+						? {
+								version: settings.latest_version,
+								tagName: `v${settings.latest_version}`,
+							}
+						: null;
+				}
+			}
+		}
+
+		const latestVersion =
+			latestRelease?.version || settings.latest_version || currentVersion;
+		const isUpdateAvailable = latestRelease
+			? compareVersions(latestVersion, currentVersion) > 0
+			: settings.update_available || false;
 
 		res.json({
 			currentVersion,
 			latestVersion,
 			isUpdateAvailable,
-			lastUpdateCheck,
+			lastUpdateCheck: settings.last_update_check || null,
 			repositoryType: settings.repository_type || "public",
-			latestRelease: {
-				tagName: latestVersion ? `v${latestVersion}` : null,
-				version: latestVersion,
-				repository: settings.github_repo_url
-					? settings.github_repo_url.split("/").slice(-2).join("/")
-					: null,
-				accessMethod: settings.repository_type === "private" ? "ssh" : "api",
+			github: {
+				repository: githubRepoUrl,
+				owner: owner,
+				repo: repo,
+				latestRelease: latestRelease,
+				latestCommit: latestCommit,
+				commitDifference: commitDifference,
 			},
 		});
 	} catch (error) {
diff --git a/backend/src/server.js b/backend/src/server.js
index 394fe2a..e407f0a 100644
--- a/backend/src/server.js
+++ b/backend/src/server.js
@@ -60,6 +60,8 @@ const {
 const repositoryRoutes = require("./routes/repositoryRoutes");
 const versionRoutes = require("./routes/versionRoutes");
 const tfaRoutes = require("./routes/tfaRoutes");
+const searchRoutes = require("./routes/searchRoutes");
+const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
 const updateScheduler = require("./services/updateScheduler");
 const { initSettings } = require("./services/settingsService");
 const { cleanup_expired_sessions } = require("./utils/session_manager");
@@ -414,6 +416,12 @@ app.use(`/api/${apiVersion}/dashboard-preferences`, dashboardPreferencesRoutes);
 app.use(`/api/${apiVersion}/repositories`, repositoryRoutes);
 app.use(`/api/${apiVersion}/version`, versionRoutes);
 app.use(`/api/${apiVersion}/tfa`, tfaRoutes);
+app.use(`/api/${apiVersion}/search`, searchRoutes);
+app.use(
+	`/api/${apiVersion}/auto-enrollment`,
+	authLimiter,
+	autoEnrollmentRoutes,
+);
 
 // Error handling middleware
 app.use((err, _req, res, _next) => {
diff --git a/backend/src/services/updateScheduler.js b/backend/src/services/updateScheduler.js
index 074cf8e..34f1979 100644
--- a/backend/src/services/updateScheduler.js
+++ b/backend/src/services/updateScheduler.js
@@ -60,13 +60,8 @@ class UpdateScheduler {
 			// Get settings
 			const settings = await prisma.settings.findFirst();
-			if (!settings || !settings.githubRepoUrl) {
-				console.log("โš ๏ธ No GitHub repository configured, skipping update check");
-				return;
-			}
-
-			// Extract owner and repo from GitHub URL
-			const repoUrl = settings.githubRepoUrl;
+			const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
+			const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
 
 			let owner, repo;
 			if (repoUrl.includes("git@github.com:")) {
@@ -128,9 +123,9 @@ class UpdateScheduler {
 			await prisma.settings.update({
 				where: { id: settings.id },
 				data: {
-					lastUpdateCheck: new Date(),
-					updateAvailable: isUpdateAvailable,
-					latestVersion: latestVersion,
+					last_update_check: new Date(),
+					update_available: isUpdateAvailable,
+					latest_version: latestVersion,
 				},
 			});
@@ -147,8 +142,8 @@ class UpdateScheduler {
 			await prisma.settings.update({
 				where: { id: settings.id },
 				data: {
-					lastUpdateCheck: new Date(),
-					updateAvailable: false,
+					last_update_check: new Date(),
+					update_available: false,
 				},
 			});
 		}
@@ -241,6 +236,16 @@ class UpdateScheduler {
 		});
 
 		if (!response.ok) {
+			const errorText = await response.text();
+			if (
+				errorText.includes("rate limit") ||
+				errorText.includes("API rate limit")
+			) {
+				console.log(
+					"โš ๏ธ GitHub API rate limit exceeded, skipping update check",
+				);
+				return null; // Return null instead of throwing error
+			}
 			throw new Error(
 				`GitHub API error: ${response.status} ${response.statusText}`,
 			);
diff --git a/backend/src/utils/session_manager.js b/backend/src/utils/session_manager.js
index b941ec5..83cbf74 100644
--- a/backend/src/utils/session_manager.js
+++ b/backend/src/utils/session_manager.js
@@ -1,5 +1,5 @@
 const jwt = require("jsonwebtoken");
-const crypto = require("crypto");
+const crypto = require("node:crypto");
 const { PrismaClient } = require("@prisma/client");
 const prisma = new PrismaClient();
 
@@ -9,7 +9,10 @@ const prisma = new
PrismaClient(); */ // Configuration -const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key"; +if (!process.env.JWT_SECRET) { + throw new Error("JWT_SECRET environment variable is required"); +} +const JWT_SECRET = process.env.JWT_SECRET; const JWT_EXPIRES_IN = process.env.JWT_EXPIRES_IN || "1h"; const JWT_REFRESH_EXPIRES_IN = process.env.JWT_REFRESH_EXPIRES_IN || "7d"; const INACTIVITY_TIMEOUT_MINUTES = parseInt( diff --git a/frontend/index.html b/frontend/index.html index b51290e..253c75c 100644 --- a/frontend/index.html +++ b/frontend/index.html @@ -2,7 +2,7 @@ - + PatchMon - Linux Patch Monitoring Dashboard diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index a9642ca..a8399e7 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -1,6 +1,7 @@ import { Route, Routes } from "react-router-dom"; import FirstTimeAdminSetup from "./components/FirstTimeAdminSetup"; import Layout from "./components/Layout"; +import LogoProvider from "./components/LogoProvider"; import ProtectedRoute from "./components/ProtectedRoute"; import SettingsLayout from "./components/SettingsLayout"; import { isAuthPhase } from "./constants/authPhases"; @@ -290,6 +291,16 @@ function AppRoutes() { } /> + + + + + + } + /> - + + + diff --git a/frontend/src/components/DiscordIcon.jsx b/frontend/src/components/DiscordIcon.jsx new file mode 100644 index 0000000..dd85546 --- /dev/null +++ b/frontend/src/components/DiscordIcon.jsx @@ -0,0 +1,16 @@ +const DiscordIcon = ({ className = "h-5 w-5" }) => { + return ( + + Discord + + + ); +}; + +export default DiscordIcon; diff --git a/frontend/src/components/GlobalSearch.jsx b/frontend/src/components/GlobalSearch.jsx new file mode 100644 index 0000000..d1f5c91 --- /dev/null +++ b/frontend/src/components/GlobalSearch.jsx @@ -0,0 +1,428 @@ +import { GitBranch, Package, Search, Server, User, X } from "lucide-react"; +import { useCallback, useEffect, useRef, useState } from "react"; +import { useNavigate } from "react-router-dom"; +import { searchAPI } from "../utils/api"; + +const GlobalSearch = () => { + const [query, setQuery] = useState(""); + const [results, setResults] = useState(null); + const [isOpen, setIsOpen] = useState(false); + const [isLoading, setIsLoading] = useState(false); + const [selectedIndex, setSelectedIndex] = useState(-1); + const searchRef = useRef(null); + const inputRef = useRef(null); + const navigate = useNavigate(); + + // Debounce search + const debounceTimerRef = useRef(null); + + const performSearch = useCallback(async (searchQuery) => { + if (!searchQuery || searchQuery.trim().length === 0) { + setResults(null); + setIsOpen(false); + return; + } + + setIsLoading(true); + try { + const response = await searchAPI.global(searchQuery); + setResults(response.data); + setIsOpen(true); + setSelectedIndex(-1); + } catch (error) { + console.error("Search error:", error); + setResults(null); + } finally { + setIsLoading(false); + } + }, []); + + const handleInputChange = (e) => { + const value = e.target.value; + setQuery(value); + + // Clear previous timer + if (debounceTimerRef.current) { + clearTimeout(debounceTimerRef.current); + } + + // Set new timer + debounceTimerRef.current = setTimeout(() => { + performSearch(value); + }, 300); + }; + + const handleClear = () => { + // Clear debounce timer to prevent any pending searches + if (debounceTimerRef.current) { + clearTimeout(debounceTimerRef.current); + } + setQuery(""); + setResults(null); + setIsOpen(false); + setSelectedIndex(-1); + inputRef.current?.focus(); + }; + + const 
handleResultClick = (result) => { + // Navigate based on result type + switch (result.type) { + case "host": + navigate(`/hosts/${result.id}`); + break; + case "package": + navigate(`/packages/${result.id}`); + break; + case "repository": + navigate(`/repositories/${result.id}`); + break; + case "user": + // Users don't have detail pages, so navigate to settings + navigate("/settings/users"); + break; + default: + break; + } + + // Close dropdown and clear + handleClear(); + }; + + // Close dropdown when clicking outside + useEffect(() => { + const handleClickOutside = (event) => { + if (searchRef.current && !searchRef.current.contains(event.target)) { + setIsOpen(false); + } + }; + + document.addEventListener("mousedown", handleClickOutside); + return () => { + document.removeEventListener("mousedown", handleClickOutside); + }; + }, []); + + // Keyboard navigation + const flattenedResults = []; + if (results) { + if (results.hosts?.length > 0) { + flattenedResults.push({ type: "header", label: "Hosts" }); + flattenedResults.push(...results.hosts); + } + if (results.packages?.length > 0) { + flattenedResults.push({ type: "header", label: "Packages" }); + flattenedResults.push(...results.packages); + } + if (results.repositories?.length > 0) { + flattenedResults.push({ type: "header", label: "Repositories" }); + flattenedResults.push(...results.repositories); + } + if (results.users?.length > 0) { + flattenedResults.push({ type: "header", label: "Users" }); + flattenedResults.push(...results.users); + } + } + + const navigableResults = flattenedResults.filter((r) => r.type !== "header"); + + const handleKeyDown = (e) => { + if (!isOpen || !results) return; + + switch (e.key) { + case "ArrowDown": + e.preventDefault(); + setSelectedIndex((prev) => + prev < navigableResults.length - 1 ? prev + 1 : prev, + ); + break; + case "ArrowUp": + e.preventDefault(); + setSelectedIndex((prev) => (prev > 0 ? prev - 1 : -1)); + break; + case "Enter": + e.preventDefault(); + if (selectedIndex >= 0 && navigableResults[selectedIndex]) { + handleResultClick(navigableResults[selectedIndex]); + } + break; + case "Escape": + e.preventDefault(); + setIsOpen(false); + setSelectedIndex(-1); + break; + default: + break; + } + }; + + // Get icon for result type + const getResultIcon = (type) => { + switch (type) { + case "host": + return ; + case "package": + return ; + case "repository": + return ; + case "user": + return ; + default: + return null; + } + }; + + // Get display text for result + const getResultDisplay = (result) => { + switch (result.type) { + case "host": + return { + primary: result.friendly_name || result.hostname, + secondary: result.ip || result.hostname, + }; + case "package": + return { + primary: result.name, + secondary: result.description || result.category, + }; + case "repository": + return { + primary: result.name, + secondary: result.distribution, + }; + case "user": + return { + primary: result.username, + secondary: result.email, + }; + default: + return { primary: "", secondary: "" }; + } + }; + + const hasResults = + results && + (results.hosts?.length > 0 || + results.packages?.length > 0 || + results.repositories?.length > 0 || + results.users?.length > 0); + + return ( +
+
+
+ +
+ { + if (query && results) setIsOpen(true); + }} + /> + {query && ( + + )} +
+ + {/* Dropdown Results */} + {isOpen && ( +
+ {isLoading ? ( +
+ Searching... +
+ ) : hasResults ? ( +
+ {/* Hosts */} + {results.hosts?.length > 0 && ( +
+
+ Hosts +
+ {results.hosts.map((host, _idx) => { + const display = getResultDisplay(host); + const globalIdx = navigableResults.findIndex( + (r) => r.id === host.id && r.type === "host", + ); + return ( + + ); + })} +
+ )} + + {/* Packages */} + {results.packages?.length > 0 && ( +
+
+ Packages +
+ {results.packages.map((pkg, _idx) => { + const display = getResultDisplay(pkg); + const globalIdx = navigableResults.findIndex( + (r) => r.id === pkg.id && r.type === "package", + ); + return ( + + ); + })} +
+ )} + + {/* Repositories */} + {results.repositories?.length > 0 && ( +
+
+ Repositories +
+ {results.repositories.map((repo, _idx) => { + const display = getResultDisplay(repo); + const globalIdx = navigableResults.findIndex( + (r) => r.id === repo.id && r.type === "repository", + ); + return ( + + ); + })} +
+ )} + + {/* Users */} + {results.users?.length > 0 && ( +
+
+ Users +
+ {results.users.map((user, _idx) => { + const display = getResultDisplay(user); + const globalIdx = navigableResults.findIndex( + (r) => r.id === user.id && r.type === "user", + ); + return ( + + ); + })} +
+ )} +
+ ) : query.trim() ? ( +
+ No results found for "{query}" +
+ ) : null} +
+ )} +
+ ); +}; + +export default GlobalSearch; diff --git a/frontend/src/components/Layout.jsx b/frontend/src/components/Layout.jsx index a2a096e..a2b3435 100644 --- a/frontend/src/components/Layout.jsx +++ b/frontend/src/components/Layout.jsx @@ -2,6 +2,7 @@ import { useQuery } from "@tanstack/react-query"; import { Activity, BarChart3, + BookOpen, ChevronLeft, ChevronRight, Clock, @@ -13,13 +14,12 @@ import { LogOut, Mail, Menu, - MessageCircle, Package, Plus, RefreshCw, + Route, Server, Settings, - Shield, Star, UserCircle, X, @@ -29,6 +29,9 @@ import { Link, useLocation, useNavigate } from "react-router-dom"; import { useAuth } from "../contexts/AuthContext"; import { useUpdateNotification } from "../contexts/UpdateNotificationContext"; import { dashboardAPI, versionAPI } from "../utils/api"; +import DiscordIcon from "./DiscordIcon"; +import GlobalSearch from "./GlobalSearch"; +import Logo from "./Logo"; import UpgradeNotificationIcon from "./UpgradeNotificationIcon"; const Layout = ({ children }) => { @@ -292,7 +295,7 @@ const Layout = ({ children }) => { onClick={() => setSidebarOpen(false)} aria-label="Close sidebar" /> -
+
-
-
- -

- PatchMon -

-
+
+ + +