From d8dc6ccd45fdb80bef2bf1d770475a44630ac009 Mon Sep 17 00:00:00 2001
From: Zachary Patten
Date: Sun, 6 Apr 2025 09:41:07 -0500
Subject: [PATCH] - Wait for commands to complete to avoid overloading the
 command queue - Add a minimum download queue size to avoid overloading the
 download queue

---
 README.md  | 41 +++++++++++++++++++++++++++++------------
 api.py     | 47 ++++++++++++++++++++++++++++++++++++++++++++---
 config.py  | 23 +++++++++++++++++++++++
 main.py    | 28 ++++++++++++++++++----------
 missing.py | 10 ++++------
 upgrade.py | 13 +++++--------
 6 files changed, 123 insertions(+), 39 deletions(-)

diff --git a/README.md b/README.md
index c6cba0c3..76855f28 100644
--- a/README.md
+++ b/README.md
@@ -98,18 +98,21 @@ My 12-year-old daughter is passionate about singing, dancing, and exploring STEM
 
 The following environment variables can be configured:
 
-| Variable                     | Description                                                              | Default  |
-|------------------------------|--------------------------------------------------------------------------|----------|
-| `API_KEY`                    | Your Sonarr API key                                                      | Required |
-| `API_URL`                    | URL to your Sonarr instance                                              | Required |
-| `API_TIMEOUT`                | Timeout in seconds for API requests to Sonarr                            | 60       |
-| `MONITORED_ONLY`             | Only process monitored shows/episodes                                    | true     |
-| `HUNT_MISSING_SHOWS`         | Maximum missing shows to process per cycle                               | 1        |
-| `HUNT_UPGRADE_EPISODES`      | Maximum upgrade episodes to process per cycle                            | 0        |
-| `SLEEP_DURATION`             | Seconds to wait after completing a cycle (900 = 15 minutes)              | 900      |
-| `RANDOM_SELECTION`           | Use random selection (`true`) or sequential (`false`)                    | true     |
-| `STATE_RESET_INTERVAL_HOURS` | Hours which the processed state files reset (168=1 week, 0=never reset)  | 168      |
-| `DEBUG_MODE`                 | Enable detailed debug logging (`true` or `false`)                        | false    |
+| Variable                      | Description                                                              | Default  |
+|-------------------------------|--------------------------------------------------------------------------|----------|
+| `API_KEY`                     | Your Sonarr API key                                                      | Required |
+| `API_URL`                     | URL to your Sonarr instance                                              | Required |
+| `API_TIMEOUT`                 | Timeout in seconds for API requests to Sonarr                            | 60       |
+| `MONITORED_ONLY`              | Only process monitored shows/episodes                                    | true     |
+| `HUNT_MISSING_SHOWS`          | Maximum missing shows to process per cycle                               | 1        |
+| `HUNT_UPGRADE_EPISODES`       | Maximum upgrade episodes to process per cycle                            | 0        |
+| `SLEEP_DURATION`              | Seconds to wait after completing a cycle (900 = 15 minutes)              | 900      |
+| `RANDOM_SELECTION`            | Use random selection (`true`) or sequential (`false`)                    | true     |
+| `STATE_RESET_INTERVAL_HOURS`  | Hours which the processed state files reset (168=1 week, 0=never reset)  | 168      |
+| `DEBUG_MODE`                  | Enable detailed debug logging (`true` or `false`)                        | false    |
+| `COMMAND_WAIT_DELAY`          | Delay in seconds between command status checks                           | 1        |
+| `COMMAND_WAIT_ATTEMPTS`       | Number of attempts to check for command completion before giving up      | 600      |
+| `MINIMUM_DOWNLOAD_QUEUE_SIZE` | Download queue size at or below which hunts may start (`-1` = no limit)  | -1       |
 
 ### Detailed Configuration Explanation
 
@@ -146,6 +149,20 @@ The following environment variables can be configured:
   - When set to `true`, the script will output detailed debugging information about API responses and internal operations.
   - Useful for troubleshooting issues but can make logs verbose.
 
+- **COMMAND_WAIT_DELAY**
+  - Certain operations, such as series refreshes and searches, run asynchronously in Sonarr.
+  - This is the delay in seconds between checks of an operation's status while waiting for it to complete.
+  - Waiting for each command to finish before issuing the next keeps the script from overloading Sonarr's command queue.
+  - It also ensures that a refresh has fully updated a show's metadata before any follow-up operations (such as searches) are performed.
+
+- **COMMAND_WAIT_ATTEMPTS**
+  - The number of status checks to make before giving up on an operation. If a command does not complete within this many attempts, it is considered failed. With the defaults (`COMMAND_WAIT_DELAY=1`, `COMMAND_WAIT_ATTEMPTS=600`), that allows up to 600 × 1s = 10 minutes per command.
+
+- **MINIMUM_DOWNLOAD_QUEUE_SIZE**
+  - A new hunt is only started when the number of items in the download queue is at or below this value. For example, if set to `5`, a new hunt will only start when there are 5 or fewer items marked as `downloading` in the queue (see the sketch below this list).
+  - This prevents the queue from being overwhelmed with too many download requests at once and avoids building up a large backlog of downloads.
+  - Set to `-1` to disable this check.
+
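+In terms of the check performed each cycle, the queue gate is roughly the following
+(a simplified sketch of the logic added to `main.py`, not the literal code; `run_hunt_cycle()`
+is a hypothetical stand-in for the missing/upgrade processing):
+
+```python
+from api import get_download_queue_size
+from config import MINIMUM_DOWNLOAD_QUEUE_SIZE
+from utils.logger import logger
+
+def run_hunt_cycle():
+    """Hypothetical stand-in for the missing/upgrade processing."""
+    ...
+
+queue_size = get_download_queue_size()
+if MINIMUM_DOWNLOAD_QUEUE_SIZE < 0 or queue_size <= MINIMUM_DOWNLOAD_QUEUE_SIZE:
+    run_hunt_cycle()  # check disabled, or the queue is small enough: hunt
+else:
+    logger.info(f"Download queue size ({queue_size}) is above the threshold; skipping this cycle.")
+```
+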
 ---
 
 ## Installation Methods
diff --git a/api.py b/api.py
index a2bf6edf..1cf21b94 100644
--- a/api.py
+++ b/api.py
@@ -5,9 +5,10 @@ Handles all communication with the Sonarr API
 """
 
 import requests
+import time
 from typing import List, Dict, Any, Optional, Union
 from utils.logger import logger, debug_log
-from config import API_KEY, API_URL, API_TIMEOUT
+from config import API_KEY, API_URL, API_TIMEOUT, COMMAND_WAIT_DELAY, COMMAND_WAIT_ATTEMPTS
 
 # Create a session for reuse
 session = requests.Session()
@@ -37,6 +38,31 @@ def sonarr_request(endpoint: str, method: str = "GET", data: Dict = None) -> Opt
     except requests.exceptions.RequestException as e:
         logger.error(f"API request error: {e}")
         return None
+
+def wait_for_command(command_id: int) -> bool:
+    """Poll Sonarr until the given command finishes or the attempt limit is reached."""
+    logger.debug(f"Waiting for command {command_id} to complete...")
+    attempts = 0
+    while attempts < COMMAND_WAIT_ATTEMPTS:
+        try:
+            time.sleep(COMMAND_WAIT_DELAY)
+            response = sonarr_request(f"command/{command_id}")
+            logger.debug(f"Command {command_id} Status: {response['status']}")
+        except Exception as error:
+            logger.error(f"Error fetching command status on attempt {attempts + 1}: {error}")
+            return False
+
+        attempts += 1
+
+        if response['status'].lower() in ['complete', 'completed']:
+            return True
+
+    logger.warning(f"Command {command_id} did not complete within the allowed attempts.")
+    return False
 
 def get_series() -> List[Dict]:
     """Get all series from Sonarr."""
@@ -57,7 +83,8 @@ def refresh_series(series_id: int) -> Optional[Dict]:
         "name": "RefreshSeries",
         "seriesId": series_id
     }
-    return sonarr_request("command", method="POST", data=data)
+    response = sonarr_request("command", method="POST", data=data)
+    if not response or "id" not in response:
+        return False
+    return wait_for_command(response['id'])
 
 def episode_search_episodes(episode_ids: List[int]) -> Optional[Dict]:
     """
@@ -71,7 +98,21 @@
         "name": "EpisodeSearch",
         "episodeIds": episode_ids
     }
-    return sonarr_request("command", method="POST", data=data)
+    response = sonarr_request("command", method="POST", data=data)
+    if not response or "id" not in response:
+        return False
+    return wait_for_command(response['id'])
+
+def get_download_queue_size() -> Optional[int]:
+    """
+    GET /api/v3/queue
+    Returns the total number of items in the queue with the status 'downloading'.
+    """
+    response = sonarr_request("queue?status=downloading")
+    total_records = response.get("totalRecords", 0) if response else 0
+    if not isinstance(total_records, int):
+        total_records = 0
+    logger.debug(f"Download Queue Size: {total_records}")
+
+    return total_records
 
 def get_cutoff_unmet(page: int = 1) -> Optional[Dict]:
     """
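As a quick way to sanity-check the new polling helper without a live Sonarr instance, `wait_for_command` can be exercised with `sonarr_request` stubbed out. The sketch below is illustrative only and not part of the patch; `fake_sonarr_request` is a made-up stub, and the example assumes the repository modules import cleanly (i.e. the environment variables `config.py` reads, such as `API_KEY` and `API_URL`, are set).

```python
# Illustrative stub: pretend the command is still running for two polls, then completed.
import api  # the module this patch modifies

polls = {"count": 0}

def fake_sonarr_request(endpoint, method="GET", data=None):
    polls["count"] += 1
    return {"status": "completed" if polls["count"] >= 3 else "queued"}

api.sonarr_request = fake_sonarr_request  # replace the real HTTP call with the stub
api.COMMAND_WAIT_DELAY = 0                # no need to actually sleep between fake polls

print(api.wait_for_command(42))           # expected output: True (after three status checks)
```
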
diff --git a/config.py b/config.py
index 1cf631f7..fd2ad61b 100644
--- a/config.py
+++ b/config.py
@@ -46,6 +46,27 @@ except ValueError:
     STATE_RESET_INTERVAL_HOURS = 168
     print(f"Warning: Invalid STATE_RESET_INTERVAL_HOURS value, using default: {STATE_RESET_INTERVAL_HOURS}")
 
+# Delay in seconds between checking the status of a command (default 1 second)
+try:
+    COMMAND_WAIT_DELAY = int(os.environ.get("COMMAND_WAIT_DELAY", "1"))
+except ValueError:
+    COMMAND_WAIT_DELAY = 1
+    print(f"Warning: Invalid COMMAND_WAIT_DELAY value, using default: {COMMAND_WAIT_DELAY}")
+
+# Number of attempts to wait for a command to complete before giving up (default 600 attempts)
+try:
+    COMMAND_WAIT_ATTEMPTS = int(os.environ.get("COMMAND_WAIT_ATTEMPTS", "600"))
+except ValueError:
+    COMMAND_WAIT_ATTEMPTS = 600
+    print(f"Warning: Invalid COMMAND_WAIT_ATTEMPTS value, using default: {COMMAND_WAIT_ATTEMPTS}")
+
+# Download queue size at or below which a hunt may start; -1 disables the check (default -1)
+try:
+    MINIMUM_DOWNLOAD_QUEUE_SIZE = int(os.environ.get("MINIMUM_DOWNLOAD_QUEUE_SIZE", "-1"))
+except ValueError:
+    MINIMUM_DOWNLOAD_QUEUE_SIZE = -1
+    print(f"Warning: Invalid MINIMUM_DOWNLOAD_QUEUE_SIZE value, using default: {MINIMUM_DOWNLOAD_QUEUE_SIZE}")
+
 # Selection Settings
 RANDOM_SELECTION = os.environ.get("RANDOM_SELECTION", "true").lower() == "true"
 MONITORED_ONLY = os.environ.get("MONITORED_ONLY", "true").lower() == "true"
@@ -64,6 +85,8 @@ def log_configuration(logger):
     logger.info(f"Missing Content Configuration: HUNT_MISSING_SHOWS={HUNT_MISSING_SHOWS}")
     logger.info(f"Upgrade Configuration: HUNT_UPGRADE_EPISODES={HUNT_UPGRADE_EPISODES}")
     logger.info(f"State Reset Interval: {STATE_RESET_INTERVAL_HOURS} hours")
+    logger.info(f"Minimum Download Queue Size: {MINIMUM_DOWNLOAD_QUEUE_SIZE}")
     logger.info(f"MONITORED_ONLY={MONITORED_ONLY}, RANDOM_SELECTION={RANDOM_SELECTION}")
     logger.info(f"HUNT_MODE={HUNT_MODE}, SLEEP_DURATION={SLEEP_DURATION}s")
+    logger.info(f"COMMAND_WAIT_DELAY={COMMAND_WAIT_DELAY}, COMMAND_WAIT_ATTEMPTS={COMMAND_WAIT_ATTEMPTS}")
     logger.debug(f"API_KEY={API_KEY}")
\ No newline at end of file
diff --git a/main.py b/main.py
index 24b08d06..636f59f0 100644
--- a/main.py
+++ b/main.py
@@ -7,10 +7,11 @@ Main entry point for the application
 import time
 import sys
 from utils.logger import logger
-from config import HUNT_MODE, SLEEP_DURATION, log_configuration
+from config import HUNT_MODE, SLEEP_DURATION, MINIMUM_DOWNLOAD_QUEUE_SIZE, log_configuration
 from missing import process_missing_episodes
 from upgrade import process_cutoff_upgrades
 from state import check_state_reset, calculate_reset_time
+from api import get_download_queue_size
 
 def main_loop() -> None:
     """Main processing loop for Huntarr-Sonarr"""
@@ -22,16 +23,23 @@
         # Track if any processing was done in this cycle
         processing_done = False
+
+        # Only hunt when the queue check is disabled or the download queue is at/below the configured threshold
+        download_queue_size = get_download_queue_size()
+        if MINIMUM_DOWNLOAD_QUEUE_SIZE < 0 or download_queue_size <= MINIMUM_DOWNLOAD_QUEUE_SIZE:
 
-        # Process shows/episodes based on HUNT_MODE
-        if HUNT_MODE in ["missing", "both"]:
-            if process_missing_episodes():
-                processing_done = True
-
-        if HUNT_MODE in ["upgrade", "both"]:
-            if process_cutoff_upgrades():
-                processing_done = True
-
+            # Process shows/episodes based on HUNT_MODE
+            if HUNT_MODE in ["missing", "both"]:
+                if process_missing_episodes():
+                    processing_done = True
+
+            if HUNT_MODE in ["upgrade", "both"]:
+                if process_cutoff_upgrades():
+                    processing_done = True
+
+        else:
+            logger.info(f"Download queue size ({download_queue_size}) is above the configured threshold ({MINIMUM_DOWNLOAD_QUEUE_SIZE}). Skipping processing this cycle.")
+
         # Calculate time until the next reset
         calculate_reset_time()
diff --git a/missing.py b/missing.py
index 39d780ff..b9af8572 100644
--- a/missing.py
+++ b/missing.py
@@ -93,20 +93,18 @@ def process_missing_episodes() -> bool:
         # Refresh the series
         logger.info(f" - Refreshing series (ID: {series_id})...")
         refresh_res = refresh_series(series_id)
-        if not refresh_res or "id" not in refresh_res:
+        if not refresh_res:
             logger.warning(f"WARNING: Refresh command failed for {show_title}. Skipping.")
-            time.sleep(5)
             continue
-        logger.info(f"Refresh command accepted (ID: {refresh_res['id']}). Waiting 5s...")
-        time.sleep(5)
+        logger.info("Refresh command completed successfully.")
 
         # Search specifically for these missing + monitored episodes
         episode_ids = [ep["id"] for ep in monitored_missing_episodes]
         logger.info(f" - Searching for {len(episode_ids)} missing episodes in '{show_title}'...")
         search_res = episode_search_episodes(episode_ids)
-        if search_res and "id" in search_res:
-            logger.info(f"Search command accepted (ID: {search_res['id']}).")
+        if search_res:
+            logger.info("Search command completed successfully.")
             processing_done = True
         else:
             logger.warning(f"WARNING: EpisodeSearch failed for show '{show_title}' (ID: {series_id}).")
diff --git a/upgrade.py b/upgrade.py
index aac6bf0a..79e68c79 100644
--- a/upgrade.py
+++ b/upgrade.py
@@ -103,19 +103,17 @@ def process_cutoff_upgrades() -> bool:
             # Refresh the series
             logger.info(" - Refreshing series information...")
             refresh_res = refresh_series(series_id)
-            if not refresh_res or "id" not in refresh_res:
+            if not refresh_res:
                 logger.warning("WARNING: Refresh command failed. Skipping this episode.")
-                time.sleep(10)
                 continue
-
-            logger.info(f"Refresh command accepted (ID: {refresh_res['id']}). Waiting 5s...")
-            time.sleep(5)
+
+            logger.info("Refresh command completed successfully.")
 
             # Search for the episode (upgrade)
             logger.info(" - Searching for quality upgrade...")
             search_res = episode_search_episodes([episode_id])
-            if search_res and "id" in search_res:
-                logger.info(f"Search command accepted (ID: {search_res['id']}).")
+            if search_res:
+                logger.info("Search command completed successfully.")
                 # Mark processed
                 save_processed_id(PROCESSED_UPGRADE_FILE, episode_id)
                 episodes_processed += 1
@@ -123,7 +121,6 @@
                 logger.info(f"Processed {episodes_processed}/{HUNT_UPGRADE_EPISODES} upgrade episodes this cycle.")
             else:
                 logger.warning(f"WARNING: Search command failed for episode ID {episode_id}.")
-                time.sleep(10)
                 continue
 
         # Move to the next page if not random