Admin9705
2025-05-01 10:10:45 -04:00
parent 1349323b4f
commit 0060755652
2 changed files with 104 additions and 22 deletions


@@ -12,6 +12,9 @@ from src.primary.utils.logger import get_logger
from src.primary.apps.lidarr import api as lidarr_api
from src.primary.stats_manager import increment_stat
from src.primary.stateful_manager import is_processed, add_processed_id
from src.primary.state import get_state_file_path
import json
import os
# Get the logger for the Lidarr module
lidarr_logger = get_logger(__name__) # Use __name__ for correct logger hierarchy
@@ -129,24 +132,81 @@ def process_missing_albums(
items_by_artist = {}
for item in missing_items: # Use the potentially filtered missing_items list
artist_id = item.get('artistId')
lidarr_logger.debug(f"Missing album item: {item.get('title')} by artistId: {artist_id}")
if artist_id:
if artist_id not in items_by_artist:
items_by_artist[artist_id] = []
items_by_artist[artist_id].append(item)
target_entities = list(items_by_artist.keys()) # List of artist IDs
lidarr_logger.debug(f"Grouped missing albums into {len(target_entities)} artists.")
lidarr_logger.info(f"Grouped missing albums into {len(target_entities)} artists.")
lidarr_logger.debug(f"Artist IDs with missing albums: {target_entities}")
else: # Album mode
target_entities = [item['id'] for item in missing_items] # Use the potentially filtered missing_items list
# Filter out already processed entities using stateful management
unprocessed_entities = []
for entity_id in target_entities:
if not is_processed("lidarr", instance_name, str(entity_id)):
unprocessed_entities.append(entity_id)
# Override the normal selection logic so a different artist is chosen each cycle
if hunt_missing_mode == "artist" and target_entities:
# Track which artist we select through a state file
state_file_path = get_state_file_path("lidarr", "last_artist_rotation")
last_selected_artists = []
# Attempt to read last rotation state
try:
if os.path.exists(state_file_path):
with open(state_file_path, 'r') as f:
rotation_data = json.load(f)
last_selected_artists = rotation_data.get("artists", [])
lidarr_logger.debug(f"Loaded artist rotation history: {last_selected_artists}")
except Exception as e:
lidarr_logger.error(f"Error reading artist rotation file: {e}")
# Continue with empty history if file can't be read
# Filter out previously selected artists that still exist in target_entities
remaining_artists = [a for a in target_entities if a not in last_selected_artists]
# If we've cycled through all artists or have issues, reset the history
if not remaining_artists:
lidarr_logger.info("All artists have been selected in previous cycles, starting fresh rotation")
# Keep the last selected artist in history to avoid immediate repetition
last_artist = last_selected_artists[-1] if last_selected_artists else None
last_selected_artists = [last_artist] if last_artist and last_artist in target_entities else []
remaining_artists = [a for a in target_entities if a not in last_selected_artists]
if remaining_artists:
# Select a random artist from remaining ones
selected_artist = random.choice(remaining_artists)
lidarr_logger.info(f"Selected artist ID {selected_artist} for this cycle (from {len(remaining_artists)} remaining artists)")
# Update history with this selection
last_selected_artists.append(selected_artist)
# Keep history to a reasonable size (based on total artists)
max_history = min(len(target_entities), 10) # Don't track more than 10 artists
if len(last_selected_artists) > max_history:
last_selected_artists = last_selected_artists[-max_history:]
# Save updated history
try:
os.makedirs(os.path.dirname(state_file_path), exist_ok=True)
with open(state_file_path, 'w') as f:
json.dump({"artists": last_selected_artists}, f)
lidarr_logger.debug(f"Saved artist rotation history: {last_selected_artists}")
except Exception as e:
lidarr_logger.error(f"Error saving artist rotation file: {e}")
# Override the normal selection logic
unprocessed_entities = [selected_artist]
else:
lidarr_logger.debug(f"Skipping already processed {search_entity_type} ID: {entity_id}")
lidarr_logger.info(f"Found {len(unprocessed_entities)} unprocessed {search_entity_type}s out of {len(target_entities)} total.")
lidarr_logger.warning("No artists available to select")
else:
# Filter out already processed entities using stateful management
unprocessed_entities = []
for entity_id in target_entities:
if not is_processed("lidarr", instance_name, str(entity_id)):
unprocessed_entities.append(entity_id)
else:
lidarr_logger.debug(f"Skipping already processed {search_entity_type} ID: {entity_id}")
lidarr_logger.info(f"Found {len(unprocessed_entities)} unprocessed {search_entity_type}s out of {len(target_entities)} total.")
if not unprocessed_entities:
lidarr_logger.info(f"No unprocessed {search_entity_type}s found for {instance_name}. All available {search_entity_type}s have been processed.")
@@ -158,12 +218,20 @@ def process_missing_albums(
return False
entities_to_search_ids = random.sample(unprocessed_entities, min(len(unprocessed_entities), total_items_to_process))
lidarr_logger.debug(f"Randomly selected {len(entities_to_search_ids)} {search_entity_type}s to search.")
lidarr_logger.info(f"Randomly selected {len(entities_to_search_ids)} {search_entity_type}s to search.")
lidarr_logger.debug(f"Unprocessed entities: {unprocessed_entities}")
lidarr_logger.debug(f"Entities to search: {entities_to_search_ids}")
# --- Trigger Search (Artist or Album) ---
if hunt_missing_mode == "artist":
lidarr_logger.info(f"Artist-based missing mode selected")
lidarr_logger.info(f"Found {len(entities_to_search_ids)} unprocessed artists to search.")
# Prepare a list for artist details log
artist_details_log = []
lidarr_logger.info(f"Triggering Artist Search for {len(entities_to_search_ids)} artists on {instance_name}...")
for artist_id in entities_to_search_ids:
for i, artist_id in enumerate(entities_to_search_ids):
if stop_check(): # Use the new stop_check function
lidarr_logger.warning("Shutdown requested during artist search trigger.")
break
@@ -176,6 +244,9 @@ def process_missing_albums(
artist_info = first_album.get('artist')
if artist_info and isinstance(artist_info, dict):
artist_name = artist_info.get('artistName', artist_name)
# Add to detailed log list
artist_details_log.append(f"{i+1}. {artist_name} (ID: {artist_id})")
lidarr_logger.info(f"Triggering Artist Search for '{artist_name}' (ID: {artist_id}) on instance {instance_name}")
try:
@@ -212,25 +283,21 @@ def process_missing_albums(
# Safely get title and artist name, provide defaults
title = album_info.get('title', f'Album ID {album_id}')
artist_name = album_info.get('artist', {}).get('artistName', 'Unknown Artist')
album_details_log.append(f"'{artist_name} - {title}' (ID: {album_id})")
album_details_log.append(f"{len(album_details_log) + 1}. {artist_name} - {title} (ID: {album_id})")
else:
# Fallback if album ID wasn't found in the fetched missing items (should be rare)
album_details_log.append(f'Album ID {album_id} (Details not found)')
album_details_log.append(f"{len(album_details_log) + 1}. Album ID {album_id} (Details not found)")
# Construct the detailed log message string
details_string = ', '.join(album_details_log)
log_message = f"*** DETAILED LOG *** Triggering Album Search for {len(album_ids_to_search)} albums (album mode) on instance {instance_name}: [{details_string}]"
# Add a debug log to show the details being constructed
lidarr_logger.debug(f"Constructed album details for logging: [{details_string}]")
# Ensure the INFO log uses the constructed message
lidarr_logger.info(log_message)
# Log each album on a new line for better readability
lidarr_logger.info(f"Albums selected for processing in this cycle:")
for album_detail in album_details_log:
lidarr_logger.info(f" {album_detail}")
# Use the correct API function name
command_id = lidarr_api.search_albums(api_url, api_key, api_timeout, album_ids_to_search)
if command_id:
# Also use descriptive list in debug log if needed
lidarr_logger.debug(f"Album search command triggered with ID: {command_id} for albums: [{details_string}]")
lidarr_logger.debug(f"Album search command triggered with ID: {command_id} for albums: [{', '.join(album_details_log)}]")
increment_stat("lidarr", "hunted") # Changed from "missing" to "hunted"
processed_count += len(album_ids_to_search) # Count albums searched
processed_artists_or_albums.update(album_ids_to_search)


@@ -104,6 +104,21 @@ def process_cutoff_upgrades(
lidarr_logger.info("No album IDs selected for upgrade search. Skipping trigger.")
return False
# Prepare detailed album information for logging
album_details_log = []
for i, album in enumerate(albums_to_search):
# Extract useful information for logging
album_title = album.get('title', f'Album ID {album["id"]}')
artist_name = album.get('artist', {}).get('artistName', 'Unknown Artist')
quality = album.get('quality', {}).get('quality', {}).get('name', 'Unknown Quality')
album_details_log.append(f"{i+1}. {artist_name} - {album_title} (ID: {album['id']}, Current Quality: {quality})")
# Log each album on a separate line for better readability
if album_details_log:
lidarr_logger.info(f"Albums selected for quality upgrade in this cycle:")
for album_detail in album_details_log:
lidarr_logger.info(f" {album_detail}")
# Check stop event before triggering search
if stop_check and stop_check(): # Use the passed stop_check function
lidarr_logger.warning("Shutdown requested, stopping upgrade album search.")