From 6c82a2858106930232fbc435638de334eec91109 Mon Sep 17 00:00:00 2001 From: Admin9705 <9705@duck.com> Date: Fri, 13 Jun 2025 00:28:19 -0400 Subject: [PATCH] Refactor logging system to utilize a database-only approach, removing file-based logging. Update Dockerfile and workflows to reflect changes in log storage, and modify documentation to guide users on accessing logs from the database. Enhance logging setup in the application to ensure compatibility with the new system. --- .github/listen.md | 2 +- .github/workflows/macos-installer-arm.yml | 2 +- .github/workflows/macos-installer-intel.yml | 2 +- Dockerfile | 4 +- docs/macos-installer.md | 2 +- main.py | 1 + src/primary/utils/clean_logger.py | 268 ++++++-------------- src/primary/utils/logs_database.py | 64 ++--- src/primary/web_server.py | 24 +- 9 files changed, 127 insertions(+), 242 deletions(-) diff --git a/.github/listen.md b/.github/listen.md index 9739b7d7..60dc62b9 100644 --- a/.github/listen.md +++ b/.github/listen.md @@ -481,7 +481,7 @@ grep -r "functionName\|variableName" frontend/ --include="*.js" 4. Add missing anchors or fix links ### Log Issues -1. Check if logs exist: `docker exec huntarr cat /config/logs/[app].log` +1. Check logs in database: `docker exec huntarr python3 -c "import sys; sys.path.insert(0, '/app/src'); from primary.utils.logs_database import get_logs_database; db = get_logs_database(); logs = db.get_logs(limit=10); [print(f'{log[\"timestamp\"]} - {log[\"app_type\"]} - {log[\"level\"]} - {log[\"message\"]}') for log in logs]"` 2. Test backend streaming: `curl -N -s "http://localhost:9705/logs?app=[app]"` 3. Check browser console for JavaScript errors 4. Verify regex patterns in `new-main.js` diff --git a/.github/workflows/macos-installer-arm.yml b/.github/workflows/macos-installer-arm.yml index 5ca5682e..bb22f47a 100644 --- a/.github/workflows/macos-installer-arm.yml +++ b/.github/workflows/macos-installer-arm.yml @@ -417,7 +417,7 @@ jobs: # Create config directory in user's Application Support mkdir -p "$HOME/Library/Application Support/Huntarr/config" - mkdir -p "$HOME/Library/Application Support/Huntarr/config/logs" + # Logs are now stored in database only # Set permissions chmod -R 755 "$HOME/Library/Application Support/Huntarr" diff --git a/.github/workflows/macos-installer-intel.yml b/.github/workflows/macos-installer-intel.yml index b6ac7728..6130095d 100644 --- a/.github/workflows/macos-installer-intel.yml +++ b/.github/workflows/macos-installer-intel.yml @@ -413,7 +413,7 @@ jobs: # Create config directory in user's Application Support mkdir -p "$HOME/Library/Application Support/Huntarr/config" - mkdir -p "$HOME/Library/Application Support/Huntarr/config/logs" + # Logs are now stored in database only # Set permissions chmod -R 755 "$HOME/Library/Application Support/Huntarr" diff --git a/Dockerfile b/Dockerfile index 2882ead9..5379b2b0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,8 +19,8 @@ RUN pip install --no-cache-dir -r requirements.txt COPY . /app/ # Create necessary directories -RUN mkdir -p /config/logs -RUN chmod -R 755 /config +# Log files are now stored in database only +RUN mkdir -p /config && chmod -R 755 /config # Set environment variables ENV PYTHONPATH=/app diff --git a/docs/macos-installer.md b/docs/macos-installer.md index 8b83a5ab..5aaddc54 100644 --- a/docs/macos-installer.md +++ b/docs/macos-installer.md @@ -44,7 +44,7 @@ The macOS application functions similarly to the Docker version with a few key d If you encounter issues: -1. 
Check the log files in `~/Library/Application Support/Huntarr/config/logs/` +1. Check the logs database at `~/Library/Application Support/Huntarr/config/logs.db` 2. Ensure proper permissions for the application folders 3. Verify your macOS version is compatible (macOS 10.15 Catalina or newer recommended) diff --git a/main.py b/main.py index 985206e4..4cc2c4e0 100644 --- a/main.py +++ b/main.py @@ -138,6 +138,7 @@ try: huntarr_logger.warning(f"Failed to initialize database logging: {e}") huntarr_logger.info("Successfully imported application components.") + huntarr_logger.info("About to call main() function...") except ImportError as e: root_logger.critical(f"Fatal Error: Failed to import application components: {e}", exc_info=True) root_logger.critical("Please ensure the application structure is correct, dependencies are installed (`pip install -r requirements.txt`), and the script is run from the project root.") diff --git a/src/primary/utils/clean_logger.py b/src/primary/utils/clean_logger.py index 5271ee2e..69138153 100644 --- a/src/primary/utils/clean_logger.py +++ b/src/primary/utils/clean_logger.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """ -Clean logging system for frontend consumption -Creates clean, stripped log messages without redundant information +Clean Logger for Huntarr +Provides database-only logging with clean, formatted messages for the web interface. """ import logging @@ -13,173 +13,94 @@ from pathlib import Path from typing import Dict, Optional import pytz -# Use the centralized path configuration -from src.primary.utils.config_paths import LOG_DIR - -# Clean log files for frontend consumption -CLEAN_LOG_FILES = { - "system": LOG_DIR / "clean_huntarr.log", - "sonarr": LOG_DIR / "clean_sonarr.log", - "radarr": LOG_DIR / "clean_radarr.log", - "lidarr": LOG_DIR / "clean_lidarr.log", - "readarr": LOG_DIR / "clean_readarr.log", - "whisparr": LOG_DIR / "clean_whisparr.log", - "eros": LOG_DIR / "clean_eros.log", - "swaparr": LOG_DIR / "clean_swaparr.log", -} - -def _get_user_timezone(): - """Get the user's selected timezone from general settings""" - try: - from src.primary.utils.timezone_utils import get_user_timezone - return get_user_timezone() - except Exception: - return pytz.UTC class CleanLogFormatter(logging.Formatter): """ - Custom formatter that creates clean log messages for frontend consumption. - Uses pipe separators for easy parsing: timestamp|level|app_type|message - All timestamps are stored in UTC for consistency. + Custom formatter that creates clean, readable log messages. """ def __init__(self): - # No format needed as we'll build it manually super().__init__() + self.timezone = self._get_timezone() - def format(self, record): - """ - Format the log record as: timestamp|level|app_type|clean_message - This format makes it easy for frontend to parse and display properly. - Timestamps are always stored in UTC. 
- """ - # Get the original formatted message - original_message = record.getMessage() - - # Clean the message by removing redundant information - clean_message = self._clean_message(original_message, record.name, record.levelname) - - # Format timestamp in UTC (for consistent storage) - timestamp = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(record.created)) - - # Determine app type from logger name - app_type = self._get_app_type(record.name) - - # Format as: timestamp|level|app_type|message - return f"{timestamp}|{record.levelname}|{app_type}|{clean_message}" + def _get_timezone(self): + """Get the configured timezone""" + try: + from src.primary.utils.timezone_utils import get_user_timezone + return get_user_timezone() + except ImportError: + # Fallback to UTC if timezone utils not available + return pytz.UTC - def _clean_message(self, message: str, logger_name: str, level: str) -> str: - """ - Clean a log message by removing redundant information. + def _get_app_type_from_logger_name(self, logger_name: str) -> str: + """Extract app type from logger name""" + if not logger_name: + return "system" - Args: - message: Original log message - logger_name: Name of the logger (e.g., 'huntarr.sonarr') - level: Log level (DEBUG, INFO, etc.) - - Returns: - Cleaned message with redundant information removed - """ - clean_msg = message + # Handle logger names like "huntarr.sonarr" or just "huntarr" + if "huntarr" in logger_name.lower(): + parts = logger_name.split(".") + if len(parts) > 1: + return parts[-1] # Return the last part (e.g., "sonarr") + else: + return "system" # Just "huntarr" becomes "system" - # Remove timestamp patterns at the beginning - # Patterns: YYYY-MM-DD HH:MM:SS [Timezone] - clean_msg = re.sub(r'^\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2}(?:\s+[A-Za-z_/]+)?\s*-\s*', '', clean_msg) + # For other logger names, try to extract app type + known_apps = ["sonarr", "radarr", "lidarr", "readarr", "whisparr", "eros", "swaparr"] + logger_lower = logger_name.lower() + for app in known_apps: + if app in logger_lower: + return app - # Remove logger name patterns - # e.g., "huntarr.sonarr - DEBUG -" or "huntarr -" - logger_pattern = logger_name.replace('.', r'\.') - clean_msg = re.sub(f'^{logger_pattern}\\s*-\\s*{level}\\s*-\\s*', '', clean_msg) - clean_msg = re.sub(f'^{logger_pattern}\\s*-\\s*', '', clean_msg) + return "system" + + def _clean_message(self, message: str) -> str: + """Clean and format the log message""" + if not message: + return "" - # Remove common redundant prefixes + # Remove ANSI color codes + ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])') + message = ansi_escape.sub('', message) + + # Remove excessive whitespace + message = re.sub(r'\s+', ' ', message).strip() + + # Remove common prefixes that add noise prefixes_to_remove = [ - r'^huntarr\.[a-zA-Z]+\s*-\s*(DEBUG|INFO|WARNING|ERROR|CRITICAL)\s*-\s*', - r'^huntarr\s*-\s*(DEBUG|INFO|WARNING|ERROR|CRITICAL)\s*-\s*', - r'^huntarr\.[a-zA-Z]+\s*-\s*', - r'^huntarr\s*-\s*', - r'^\[system\]\s*', - r'^\[sonarr\]\s*', - r'^\[radarr\]\s*', - r'^\[lidarr\]\s*', - r'^\[readarr\]\s*', - r'^\[whisparr\]\s*', - r'^\[eros\]\s*', - r'^\[swaparr\]\s*', + r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3} ', # Timestamp prefixes + r'^\[\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\] ', # Bracketed timestamps + r'^INFO:', + r'^DEBUG:', + r'^WARNING:', + r'^ERROR:', + r'^CRITICAL:', ] - for pattern in prefixes_to_remove: - clean_msg = re.sub(pattern, '', clean_msg, flags=re.IGNORECASE) + for prefix_pattern in prefixes_to_remove: + message 
= re.sub(prefix_pattern, '', message) - # Remove any remaining timestamp patterns that might be in the middle - clean_msg = re.sub(r'\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2}(?:\s+[A-Za-z_/]+)?\s*-\s*', '', clean_msg) - - # Clean up extra whitespace and dashes - clean_msg = re.sub(r'^\s*-\s*', '', clean_msg) # Remove leading dashes - clean_msg = re.sub(r'\s+', ' ', clean_msg) # Normalize whitespace - clean_msg = clean_msg.strip() # Remove leading/trailing whitespace - - # If the message is empty after cleaning, provide a fallback - if not clean_msg: - clean_msg = "Log message" - - return clean_msg + return message.strip() - def _get_app_type(self, logger_name: str) -> str: - """ - Determine the app type from the logger name. + def format(self, record): + """Format the log record into a clean message""" + # Get timezone-aware timestamp + dt = datetime.fromtimestamp(record.created, tz=self.timezone) + timestamp_str = dt.strftime('%Y-%m-%d %H:%M:%S') - Args: - logger_name: Name of the logger (e.g., 'huntarr.sonarr') - - Returns: - App type (e.g., 'sonarr', 'system') - """ - # Remove 'huntarr.' prefix if present - if logger_name.startswith('huntarr.'): - logger_name = logger_name[8:] + # Get app type from logger name + app_type = self._get_app_type_from_logger_name(record.name) - # Map logger name to app type - app_types = { - 'sonarr': 'sonarr', - 'radarr': 'radarr', - 'lidarr': 'lidarr', - 'readarr': 'readarr', - 'whisparr': 'whisparr', - 'eros': 'eros', - 'swaparr': 'swaparr', - } + # Clean the message + clean_message = self._clean_message(record.getMessage()) - return app_types.get(logger_name, 'system') - - -class CleanLogHandler(logging.Handler): - """ - Custom log handler that writes clean log messages to separate files. - """ - - def __init__(self, log_file_path: Path): - super().__init__() - self.log_file_path = log_file_path - self.setFormatter(CleanLogFormatter()) - - # Ensure the log directory exists - self.log_file_path.parent.mkdir(parents=True, exist_ok=True) - - def emit(self, record): - """Write the log record to the clean log file.""" - try: - msg = self.format(record) - with open(self.log_file_path, 'a', encoding='utf-8') as f: - f.write(msg + '\n') - except Exception: - self.handleError(record) + # Return formatted message: timestamp|level|app_type|message + return f"{timestamp_str}|{record.levelname}|{app_type}|{clean_message}" class DatabaseLogHandler(logging.Handler): """ - Custom log handler that writes clean log messages directly to the logs database. - This replaces file-based logging for the web UI. + Custom log handler that writes clean log messages to the logs database. """ def __init__(self, app_type: str): @@ -190,7 +111,7 @@ class DatabaseLogHandler(logging.Handler): @property def logs_db(self): - """Lazy load the logs database to avoid circular imports""" + """Lazy load the logs database instance""" if self._logs_db is None: from src.primary.utils.logs_database import get_logs_database self._logs_db = get_logs_database() @@ -230,16 +151,14 @@ class DatabaseLogHandler(logging.Handler): print(f"Error writing log to database: {e}") -# Global clean handlers registry -_clean_handlers: Dict[str, CleanLogHandler] = {} +# Global database handlers registry _database_handlers: Dict[str, DatabaseLogHandler] = {} _setup_complete = False def setup_clean_logging(): """ - Set up clean logging handlers for all known logger types. - This creates both file handlers (for backward compatibility) and database handlers (for web UI). 
+ Set up database logging handlers for all known logger types. This should be called once during application startup. """ global _setup_complete @@ -250,27 +169,20 @@ def setup_clean_logging(): from src.primary.utils.logger import get_logger - # Set up clean handlers for each app type - for app_type, clean_log_file in CLEAN_LOG_FILES.items(): - # File handler (existing functionality) - if app_type not in _clean_handlers: - clean_handler = CleanLogHandler(clean_log_file) - clean_handler.setLevel(logging.DEBUG) - _clean_handlers[app_type] = clean_handler - - # Database handler (new functionality) + # Known app types for Huntarr + app_types = ['system', 'sonarr', 'radarr', 'lidarr', 'readarr', 'whisparr', 'eros', 'swaparr'] + + # Set up database handlers for each app type + for app_type in app_types: + # Database handler if app_type not in _database_handlers: database_handler = DatabaseLogHandler(app_type) database_handler.setLevel(logging.DEBUG) _database_handlers[app_type] = database_handler - # Get the logger for this app type and add both handlers + # Get the logger for this app type and add database handler logger = get_logger(app_type) - # Add file handler if not already added - if _clean_handlers[app_type] not in logger.handlers: - logger.addHandler(_clean_handlers[app_type]) - # Add database handler if not already added if _database_handlers[app_type] not in logger.handlers: logger.addHandler(_database_handlers[app_type]) @@ -278,31 +190,9 @@ def setup_clean_logging(): _setup_complete = True -def get_clean_log_file(app_type: str) -> Optional[Path]: +def get_clean_log_file_path(app_type: str) -> Optional[Path]: """ - Get the clean log file path for a specific app type. - - Args: - app_type: The app type (e.g., 'sonarr', 'system') - - Returns: - Path to the clean log file, or None if not found + Legacy function for backward compatibility. + Returns None since we no longer use file-based logging. 
""" - return CLEAN_LOG_FILES.get(app_type) - - -def cleanup_clean_logs(): - """Remove all clean log handlers and close files.""" - from src.primary.utils.logger import get_logger - - for app_type, handler in _clean_handlers.items(): - if app_type == "system": - logger_name = "huntarr" - else: - logger_name = app_type - - logger = get_logger(logger_name) - logger.removeHandler(handler) - handler.close() - - _clean_handlers.clear() + return None diff --git a/src/primary/utils/logs_database.py b/src/primary/utils/logs_database.py index 5a5aaa18..357bd320 100644 --- a/src/primary/utils/logs_database.py +++ b/src/primary/utils/logs_database.py @@ -11,37 +11,39 @@ from datetime import datetime, timedelta from pathlib import Path from typing import Dict, List, Any, Optional from contextlib import contextmanager +import threading -from src.primary.utils.logger import get_logger - -logger = get_logger(__name__) +# Don't import logger here to avoid circular dependencies during initialization +# from src.primary.utils.logger import get_logger +# logger = get_logger(__name__) class LogsDatabase: """Database manager for log storage""" def __init__(self): + print("LogsDatabase.__init__() starting...") self.db_path = self._get_database_path() + print(f"Database path set to: {self.db_path}") self.ensure_database_exists() + print("LogsDatabase.__init__() completed successfully") def _get_database_path(self) -> Path: """Get the path to the logs database file""" - # Import CONFIG_DIR here to avoid circular imports - try: - from src.primary.utils.config_paths import CONFIG_DIR - db_path = Path(CONFIG_DIR) / "logs.db" - except ImportError: - # Fallback if config_paths is not available - import os - config_dir = os.environ.get('CONFIG_DIR', '/config') - db_path = Path(config_dir) / "logs.db" - - logger.info(f"Logs database path: {db_path}") + print("_get_database_path() starting...") + # Use simple fallback approach to avoid import issues + import os + config_dir = os.environ.get('CONFIG_DIR', '/config') + db_path = Path(config_dir) / "logs.db" + print(f"Logs database path: {db_path}") return db_path def ensure_database_exists(self): """Create the logs database and tables if they don't exist""" + print("ensure_database_exists() starting...") try: + print(f"Attempting to connect to database at: {self.db_path}") with sqlite3.connect(self.db_path) as conn: + print("Database connection established successfully") # Create logs table conn.execute(''' CREATE TABLE IF NOT EXISTS logs ( @@ -62,9 +64,9 @@ class LogsDatabase: conn.execute('CREATE INDEX IF NOT EXISTS idx_logs_app_level ON logs(app_type, level)') conn.commit() - logger.info(f"Logs database initialized at: {self.db_path}") + print(f"Logs database initialized at: {self.db_path}") except Exception as e: - logger.error(f"Failed to initialize logs database: {e}") + print(f"Failed to initialize logs database: {e}") raise def insert_log(self, timestamp: datetime, level: str, app_type: str, message: str, logger_name: str = None): @@ -110,7 +112,7 @@ class LogsDatabase: return [dict(row) for row in rows] except Exception as e: - logger.error(f"Error getting logs: {e}") + print(f"Error getting logs: {e}") return [] def get_log_count(self, app_type: str = None, level: str = None, search: str = None) -> int: @@ -135,7 +137,7 @@ class LogsDatabase: cursor = conn.execute(query, params) return cursor.fetchone()[0] except Exception as e: - logger.error(f"Error getting log count: {e}") + print(f"Error getting log count: {e}") return 0 def cleanup_old_logs(self, 
days_to_keep: int = 30, max_entries_per_app: int = 10000): @@ -169,11 +171,11 @@ class LogsDatabase: conn.commit() if deleted_by_age > 0 or total_deleted_by_count > 0: - logger.info(f"Cleaned up logs: {deleted_by_age} by age, {total_deleted_by_count} by count") + print(f"Cleaned up logs: {deleted_by_age} by age, {total_deleted_by_count} by count") return deleted_by_age + total_deleted_by_count except Exception as e: - logger.error(f"Error cleaning up logs: {e}") + print(f"Error cleaning up logs: {e}") return 0 def get_app_types(self) -> List[str]: @@ -183,7 +185,7 @@ class LogsDatabase: cursor = conn.execute("SELECT DISTINCT app_type FROM logs ORDER BY app_type") return [row[0] for row in cursor.fetchall()] except Exception as e: - logger.error(f"Error getting app types: {e}") + print(f"Error getting app types: {e}") return [] def get_log_levels(self) -> List[str]: @@ -193,7 +195,7 @@ class LogsDatabase: cursor = conn.execute("SELECT DISTINCT level FROM logs ORDER BY level") return [row[0] for row in cursor.fetchall()] except Exception as e: - logger.error(f"Error getting log levels: {e}") + print(f"Error getting log levels: {e}") return [] def clear_logs(self, app_type: str = None): @@ -208,21 +210,25 @@ class LogsDatabase: deleted_count = cursor.rowcount conn.commit() - logger.info(f"Cleared {deleted_count} logs" + (f" for {app_type}" if app_type else "")) + print(f"Cleared {deleted_count} logs" + (f" for {app_type}" if app_type else "")) return deleted_count except Exception as e: - logger.error(f"Error clearing logs: {e}") + print(f"Error clearing logs: {e}") return 0 # Global instance _logs_db = None +_logs_db_lock = threading.Lock() def get_logs_database() -> LogsDatabase: - """Get the global logs database instance""" + """Get the global logs database instance (thread-safe singleton)""" global _logs_db if _logs_db is None: - _logs_db = LogsDatabase() + with _logs_db_lock: + # Double-check locking pattern + if _logs_db is None: + _logs_db = LogsDatabase() return _logs_db @@ -239,11 +245,11 @@ def schedule_log_cleanup(): logs_db = get_logs_database() deleted_count = logs_db.cleanup_old_logs(days_to_keep=30, max_entries_per_app=10000) if deleted_count > 0: - logger.info(f"Scheduled cleanup removed {deleted_count} old log entries") + print(f"Scheduled cleanup removed {deleted_count} old log entries") except Exception as e: - logger.error(f"Error in scheduled log cleanup: {e}") + print(f"Error in scheduled log cleanup: {e}") # Start cleanup thread cleanup_thread = threading.Thread(target=cleanup_worker, daemon=True) cleanup_thread.start() - logger.info("Scheduled log cleanup thread started") \ No newline at end of file + print("Scheduled log cleanup thread started") \ No newline at end of file diff --git a/src/primary/web_server.py b/src/primary/web_server.py index 9bad2643..a1b57f42 100644 --- a/src/primary/web_server.py +++ b/src/primary/web_server.py @@ -27,7 +27,7 @@ from flask import Flask, render_template, request, jsonify, Response, send_from_ # Use only settings_manager from src.primary import settings_manager from src.primary.utils.logger import setup_main_logger, get_logger, LOG_DIR, update_logging_levels # Import get_logger, LOG_DIR, and update_logging_levels -from src.primary.utils.clean_logger import CLEAN_LOG_FILES # Import clean log files +# Clean logging is now database-only from src.primary.auth import ( authenticate_request, user_exists, create_user, verify_user, create_session, logout, SESSION_COOKIE_NAME, is_2fa_enabled, generate_2fa_secret, @@ -288,21 +288,7 @@ def 
inject_base_url(): # Lock for accessing the log files log_lock = Lock() -# Define known log files based on clean logger config -KNOWN_LOG_FILES = { - "sonarr": CLEAN_LOG_FILES.get("sonarr"), - "radarr": CLEAN_LOG_FILES.get("radarr"), - "lidarr": CLEAN_LOG_FILES.get("lidarr"), - "readarr": CLEAN_LOG_FILES.get("readarr"), - "whisparr": CLEAN_LOG_FILES.get("whisparr"), - "eros": CLEAN_LOG_FILES.get("eros"), # Added Eros to known log files - "swaparr": CLEAN_LOG_FILES.get("swaparr"), # Added Swaparr to known log files - "system": CLEAN_LOG_FILES.get("system"), # Map 'system' to the clean huntarr log -} -# Filter out None values if an app log file doesn't exist -KNOWN_LOG_FILES = {k: v for k, v in KNOWN_LOG_FILES.items() if v} - -ALL_APP_LOG_FILES = list(KNOWN_LOG_FILES.values()) # List of all individual log file paths +# Log files are now handled by database-only logging system # Handle both root path and base URL root path @app.route('/') @@ -321,8 +307,7 @@ def user(): # Removed /settings and /logs routes if handled by index.html and JS routing # Keep /logs if it's the actual SSE endpoint -@app.route('/logs') -def logs_stream(): +# Old file-based logs route removed - using database-based logs now """ Event stream for logs. Filter logs by app type using the 'app' query parameter. @@ -563,6 +548,9 @@ def logs_stream(): response.headers['X-Accel-Buffering'] = 'no' # Disable nginx buffering if using nginx return response +# Legacy file-based logs route removed - now using database-based log routes in log_routes.py +# The frontend should use /api/logs endpoints instead + @app.route('/api/settings', methods=['GET']) def api_settings(): if request.method == 'GET':
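
For a quick end-to-end check of the new database-only path, something like the sketch below should work from inside the container. It is only an illustration, not part of the patch: it assumes PYTHONPATH=/app (as the Dockerfile sets) and a writable CONFIG_DIR, and it uses only calls that appear in this diff (setup_clean_logging, get_logger, get_logs_database, get_logs).

# Hypothetical smoke test for database-only logging; not part of this patch.
import sys
sys.path.insert(0, '/app')  # Dockerfile sets PYTHONPATH=/app; added here for ad-hoc runs

from src.primary.utils.clean_logger import setup_clean_logging
from src.primary.utils.logger import get_logger
from src.primary.utils.logs_database import get_logs_database

setup_clean_logging()                                    # attaches one DatabaseLogHandler per app type
get_logger('sonarr').info('database logging smoke test')  # should land in logs.db via the handler

db = get_logs_database()                                 # thread-safe singleton over $CONFIG_DIR/logs.db
for entry in db.get_logs(limit=5):
    print(entry['timestamp'], entry['level'], entry['app_type'], entry['message'])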
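
CleanLogFormatter.format() still renders records as "timestamp|LEVEL|app_type|message". A minimal parsing sketch for anything that consumes that formatted output is shown below; the function name and returned keys are illustrative only, and whether the frontend reads this exact form (rather than the /api/logs endpoints) is an assumption.

# Illustrative parser for the pipe-delimited records produced by CleanLogFormatter.
def parse_clean_record(line: str) -> dict:
    timestamp, level, app_type, message = line.split('|', 3)  # maxsplit=3 keeps any '|' inside the message
    return {'timestamp': timestamp, 'level': level, 'app_type': app_type, 'message': message}

print(parse_clean_record('2025-06-13 00:28:19|INFO|sonarr|Cycle complete'))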
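
Retention is normally handled by the daemon thread started in schedule_log_cleanup(), but the same methods can be run by hand, for example to check per-app volume before forcing a cleanup. The sketch below uses only methods defined in logs_database.py; the 30-day / 10,000-row values mirror the defaults in this patch.

# Manual maintenance sketch (assumes the same import path as the examples above).
from src.primary.utils.logs_database import get_logs_database

db = get_logs_database()
for app in db.get_app_types():                       # distinct app_type values currently in the table
    print(app, db.get_log_count(app_type=app))

removed = db.cleanup_old_logs(days_to_keep=30, max_entries_per_app=10000)
print(f'cleanup removed {removed} entries')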