mirror of
https://github.com/plexguide/Huntarr.io.git
synced 2026-02-23 07:08:45 -06:00
feat: optimize logging by changing verbose messages to DEBUG level across authentication, stats management, and web server components to reduce log noise
This commit is contained in:
@@ -627,3 +627,16 @@ The smart `cyclelock` system provides reliable cycle state tracking:
 2. **Timezone handling**: Always use UTC for consistent datetime calculations across containers
 3. **State management**: Explicit state fields (like cyclelock) are more reliable than inferring state from timestamps
 4. **FOUC prevention**: Hidden elements need explicit JavaScript to make them visible after initialization
+5. **Log level optimization**: Move ALL verbose authentication, log streaming, and stats increment messages to DEBUG level to reduce log noise and improve readability. This includes:
+   - "Request IP address" messages in `auth.py`
+   - "Local Bypass Mode is DISABLED" messages in `auth.py`
+   - "Direct IP is a local network IP" messages in `auth.py`
+   - "Local network access - Authentication bypassed" messages in `auth.py`
+   - "Starting log stream" messages in `web_server.py`
+   - "Log stream generator started" messages in `web_server.py`
+   - "Client disconnected from log stream" messages in `web_server.py`
+   - "Successfully removed client from active log streams" messages in `web_server.py`
+   - "Log stream generator finished" messages in `web_server.py`
+   - "*** STATS INCREMENT ***" messages in `stats_manager.py`
+   - "*** STATS ONLY INCREMENT ***" messages in `stats_manager.py`
+   - "*** STATS INCREMENT ***" messages in app-specific files (e.g., `sonarr/missing.py`, `sonarr/upgrade.py`)
||||
@@ -186,7 +186,7 @@ def process_missing_episodes_mode(
     for episode_id in episode_ids:
         # Increment stat for each episode individually, just like Radarr
         increment_stat("sonarr", "hunted")
-        sonarr_logger.info(f"*** STATS INCREMENT *** sonarr hunted by 1 for episode ID {episode_id}")
+        sonarr_logger.debug(f"*** STATS INCREMENT *** sonarr hunted by 1 for episode ID {episode_id}")
 
         # Log to history system
         # Find the corresponding episode data for this ID
|
||||
@@ -179,7 +179,7 @@ def process_upgrade_episodes_mode(
     for episode_id in episode_ids:
         # Increment stat for each episode individually, just like Radarr
         increment_stat("sonarr", "upgraded")
-        sonarr_logger.info(f"*** STATS INCREMENT *** sonarr upgraded by 1 for episode ID {episode_id}")
+        sonarr_logger.debug(f"*** STATS INCREMENT *** sonarr upgraded by 1 for episode ID {episode_id}")
 
     # Mark episodes as processed using stateful management
     for episode_id in episode_ids:
||||
auth.py (+5 −5)
@@ -311,7 +311,7 @@ def authenticate_request():
         return None
 
     remote_addr = request.remote_addr
-    logger.info(f"Request IP address: {remote_addr}")
+    logger.debug(f"Request IP address: {remote_addr}")
 
     if local_access_bypass:
         # Common local network IP ranges
@@ -351,7 +351,7 @@ def authenticate_request():
             for network in local_networks:
                 if possible_client_ip == network or (network.endswith('.') and possible_client_ip.startswith(network)):
                     is_local = True
-                    logger.info(f"Forwarded IP {possible_client_ip} is a local network IP (matches {network})")
+                    logger.debug(f"Forwarded IP {possible_client_ip} is a local network IP (matches {network})")
                     break
 
         # Check if direct remote_addr is a local network IP if not already determined
@@ -359,16 +359,16 @@ def authenticate_request():
             for network in local_networks:
                 if remote_addr == network or (network.endswith('.') and remote_addr.startswith(network)):
                     is_local = True
-                    logger.info(f"Direct IP {remote_addr} is a local network IP (matches {network})")
+                    logger.debug(f"Direct IP {remote_addr} is a local network IP (matches {network})")
                     break
 
         if is_local:
-            logger.info(f"Local network access from {remote_addr} - Authentication bypassed! (Local Bypass Mode)")
+            logger.debug(f"Local network access from {remote_addr} - Authentication bypassed! (Local Bypass Mode)")
             return None
         else:
             logger.warning(f"Access from {remote_addr} is not recognized as local network - Authentication required")
     else:
-        logger.info("Local Bypass Mode is DISABLED - Authentication required")
+        logger.debug("Local Bypass Mode is DISABLED - Authentication required")
 
     # Check for valid session
     session_id = session.get(SESSION_COOKIE_NAME)
@@ -408,7 +408,7 @@ def increment_stat(app_type: str, stat_type: str, count: int = 1) -> bool:
     prev_value = stats[app_type][stat_type]
     stats[app_type][stat_type] += count
     new_value = stats[app_type][stat_type]
-    logger.info(f"*** STATS INCREMENT *** {app_type} {stat_type} by {count}: {prev_value} -> {new_value}")
+    logger.debug(f"*** STATS INCREMENT *** {app_type} {stat_type} by {count}: {prev_value} -> {new_value}")
     save_success = save_stats(stats)
 
     if not save_success:
@@ -455,7 +455,7 @@ def increment_stat_only(app_type: str, stat_type: str, count: int = 1) -> bool:
     prev_value = stats[app_type][stat_type]
     stats[app_type][stat_type] += count
     new_value = stats[app_type][stat_type]
-    logger.info(f"*** STATS ONLY INCREMENT *** {app_type} {stat_type} by {count}: {prev_value} -> {new_value} (API cap NOT incremented)")
+    logger.debug(f"*** STATS ONLY INCREMENT *** {app_type} {stat_type} by {count}: {prev_value} -> {new_value} (API cap NOT incremented)")
     save_success = save_stats(stats)
 
     if not save_success:
|
||||
@@ -342,7 +342,7 @@ def logs_stream():
     client_id = request.remote_addr
     current_time_str = datetime.datetime.now().strftime("%H:%M:%S") # Renamed variable
 
-    web_logger.info(f"Starting log stream for app type: {app_type} (client: {client_id}, time: {current_time_str})")
+    web_logger.debug(f"Starting log stream for app type: {app_type} (client: {client_id}, time: {current_time_str})")
 
     # Track active connections to limit resource usage
     if not hasattr(app, 'active_log_streams'):
@@ -377,7 +377,7 @@ def logs_stream():
     def generate():
         """Generate log events for the SSE stream."""
         client_ip = request.remote_addr
-        web_logger.info(f"Log stream generator started for {app_type} (Client: {client_ip})")
+        web_logger.debug(f"Log stream generator started for {app_type} (Client: {client_ip})")
         try:
             # Initialize last activity time
             last_activity = time.time()
||||
@@ -534,7 +534,7 @@ def logs_stream():
 
         except GeneratorExit:
             # Clean up when client disconnects
-            web_logger.info(f"Client {client_id} disconnected from log stream for {app_type}. Cleaning up.")
+            web_logger.debug(f"Client {client_id} disconnected from log stream for {app_type}. Cleaning up.")
         except Exception as e:
             web_logger.error(f"Unhandled error in log stream generator for {app_type} (Client: {client_ip}): {e}", exc_info=True)
             try:
||||
@@ -547,10 +547,10 @@ def logs_stream():
             with app.log_stream_lock:
                 removed_client = app.active_log_streams.pop(client_id, None)
                 if removed_client:
-                    web_logger.info(f"Successfully removed client {client_id} from active log streams.")
+                    web_logger.debug(f"Successfully removed client {client_id} from active log streams.")
                 else:
                     web_logger.debug(f"Client {client_id} was already removed from active log streams before finally block.")
-            web_logger.info(f"Log stream generator finished for {app_type} (Client: {client_id})")
+            web_logger.debug(f"Log stream generator finished for {app_type} (Client: {client_id})")
 
     # Return the SSE response with appropriate headers for better streaming
     response = Response(stream_with_context(generate()), mimetype='text/event-stream') # Use stream_with_context
|
||||
Reference in New Issue
Block a user