From 61bc6803eed86d138dd46836a1f271c1c53102c1 Mon Sep 17 00:00:00 2001 From: eversio Date: Fri, 27 Sep 2024 19:59:14 +1000 Subject: [PATCH] fix: refactor and re-enable alldebrid The recent realdebrid download refactor broke alldebrid. This change updates alldebrid to match the new realdebrid patterns. --- src/program/downloaders/__init__.py | 4 +- src/program/downloaders/alldebrid.py | 776 ++++-------------- src/program/downloaders/realdebrid.py | 28 +- src/program/downloaders/shared.py | 19 +- src/tests/test_alldebrid_downloader.py | 176 ++++ .../test_data/alldebrid_magnet_delete.json | 6 + .../test_data/alldebrid_magnet_instant.json | 16 + .../alldebrid_magnet_instant_unavailable.json | 12 + ...ldebrid_magnet_status_one_downloading.json | 25 + .../alldebrid_magnet_status_one_ready.json | 40 + .../alldebrid_magnet_upload_not_ready.json | 16 + .../alldebrid_magnet_upload_ready.json | 16 + 12 files changed, 510 insertions(+), 624 deletions(-) create mode 100644 src/tests/test_alldebrid_downloader.py create mode 100644 src/tests/test_data/alldebrid_magnet_delete.json create mode 100644 src/tests/test_data/alldebrid_magnet_instant.json create mode 100644 src/tests/test_data/alldebrid_magnet_instant_unavailable.json create mode 100644 src/tests/test_data/alldebrid_magnet_status_one_downloading.json create mode 100644 src/tests/test_data/alldebrid_magnet_status_one_ready.json create mode 100644 src/tests/test_data/alldebrid_magnet_upload_not_ready.json create mode 100644 src/tests/test_data/alldebrid_magnet_upload_ready.json diff --git a/src/program/downloaders/__init__.py b/src/program/downloaders/__init__.py index 5d0daa69..59b539c7 100644 --- a/src/program/downloaders/__init__.py +++ b/src/program/downloaders/__init__.py @@ -17,7 +17,7 @@ def __init__(self): self.speed_mode = settings_manager.settings.downloaders.prefer_speed_over_quality self.service = next((service for service in [ RealDebridDownloader(), - #AllDebridDownloader(), + AllDebridDownloader(), 
class AllDebridDownloader:
    """All-Debrid API Wrapper"""

    def __init__(self):
        self.key = "alldebrid"
        self.settings = settings.settings.downloaders.all_debrid
        self.initialized = self.validate()
        if self.initialized:
            # Remember hashes already on the account so callers can skip re-adding them.
            self.existing_hashes = [torrent["hash"] for torrent in get_torrents()]
            self.file_finder = FileFinder("filename", "filesize")
            logger.success("AllDebrid initialized!")

    def validate(self) -> bool:
        """Validate All-Debrid settings and API key.

        Returns True only when the service is enabled, an API key is set,
        the proxy config is coherent, the API is reachable and the account
        is premium.
        """
        if not self.settings.enabled:
            return False
        if not self.settings.api_key:
            logger.warning("All-Debrid API key is not set")
            return False
        if self.settings.proxy_enabled and not self.settings.proxy_url:
            logger.error("Proxy is enabled but no proxy URL is provided.")
            return False
        try:
            user_info = get_user()
            if user_info:
                user = user_info.get("data", {}).get("user", {})
                # `premiumUntil` is treated as a unix timestamp (0 when absent).
                expiration = user.get("premiumUntil", 0)
                expiration_datetime = datetime.utcfromtimestamp(expiration)
                expiration_message = premium_days_left(expiration_datetime)

                premium = bool(user.get("isPremium", False))
                if not premium:
                    logger.error("You are not a premium member.")
                    return False
                else:
                    logger.log("DEBRID", expiration_message)

                return premium
        except ConnectTimeout:
            logger.error("Connection to All-Debrid timed out.")
        except Exception as e:
            logger.exception(f"Failed to validate All-Debrid settings: {e}")
        return False

    def process_hashes(
        self, chunk: list[str], needed_media: dict, break_pointer: list[bool]
    ) -> dict:
        """Framework entry point; delegates to get_cached_containers."""
        return self.get_cached_containers(chunk, needed_media, break_pointer)

    def download_cached(self, active_stream: dict) -> str:
        """Add the cached torrent to the account and return its All-Debrid id.

        Raises:
            Exception: when the magnet upload fails (add_torrent returned None).
        """
        torrent_id = add_torrent(active_stream.get("infohash"))
        if torrent_id:
            self.existing_hashes.append(active_stream.get("infohash"))
            return torrent_id
        raise Exception("Failed to download torrent.")

    def get_cached_containers(
        self, infohashes: list[str], needed_media: dict, break_pointer: list[bool]
    ) -> dict:
        """
        Get containers that are available in the debrid cache containing `needed_media`

        Parameters:
        - infohashes: a list of hashes that might contain the data we need
        - needed_media: a dict of seasons, with lists of episodes, indicating what content is needed
        - break_pointer: first bool indicates if the needed content was found yet, 2nd pointer indicates if we should break once it's found.
        """
        cached_containers = {}
        response = get_instant_availability(infohashes)
        magnets = {m.get("hash"): m for m in response}

        # Hoisted out of the loop: the predicate does not depend on loop state.
        # Rejecting containers with samples/non-video files avoids compressed downloads.
        def all_files_valid(files: list) -> bool:
            filenames = [f.lower() for f, _ in walk_alldebrid_files(files)]
            return all(
                "sample" not in file and file.rsplit(".", 1)[-1] in VIDEO_EXTENSIONS
                for file in filenames
            )

        for infohash in infohashes:
            if all(break_pointer):
                break
            cached_containers[infohash] = {}
            files = magnets.get(infohash, {}).get("files", [])
            if not files:
                continue

            if all_files_valid(files):
                # The file_finder needs files to be in a dict, but it doesn't care about the keys
                container = {
                    i: dict(filename=name, filesize=size)
                    for i, (name, size) in enumerate(walk_alldebrid_files(files))
                }
                cached_containers[infohash] = self.file_finder.get_cached_container(
                    needed_media, break_pointer, container
                )
                if cached_containers[infohash]:
                    break_pointer[0] = True
                    if break_pointer[1]:
                        break

        return cached_containers

    def get_torrent_names(self, id: str) -> tuple[str, str]:
        """Return (name, alternative name) for a torrent.

        All-Debrid exposes a single filename, so it is used for both slots.
        Raises KeyError if the torrent status has no filename (get_status
        returned {}).
        """
        info = get_status(id)
        return info["filename"], info["filename"]

    def delete_torrent_with_infohash(self, infohash: str):
        """Delete the account torrent matching `infohash`, if one exists."""
        # next(..., None): an unknown hash is a no-op instead of raising
        # StopIteration out of this method.
        torrent_id = next(
            (torrent["id"] for torrent in get_torrents() if torrent["hash"] == infohash),
            None,
        )
        if torrent_id:
            delete_torrent(torrent_id)
Episode) -> bool: - """Check if file is wanted for an episode""" - if not isinstance(item, Episode): - logger.error(f"Item is not an Episode instance: {item.log_string}") - return False - - min_size = self.download_settings.episode_filesize_min * 1_000_000 - max_size = self.download_settings.episode_filesize_max * 1_000_000 if self.download_settings.episode_filesize_max != -1 else float("inf") - - if not isinstance(file, dict) or file.get("s", 0) < min_size or file.get("s", 0) > max_size or splitext(file.get("n", "").lower())[1] not in WANTED_FORMATS: - return False - - one_season = len(item.parent.parent.seasons) == 1 - - parsed_file = parse(file["n"]) - if parsed_file and item.number in parsed_file.episodes and (item.parent.number in parsed_file.seasons or one_season): - item.set("folder", item.active_stream.get("name")) - item.set("alternative_folder", item.active_stream.get("alternative_name")) - item.set("file", file["n"]) - return True - return False - def _is_wanted_season(self, files: list, item: Season) -> bool: - """Check if files are wanted for a season""" - if not isinstance(item, Season): - logger.error(f"Item is not a Season instance: {item.log_string}") - return False - - min_size = self.download_settings.episode_filesize_min * 1_000_000 - max_size = self.download_settings.episode_filesize_max * 1_000_000 if self.download_settings.episode_filesize_max != -1 else float("inf") - - filenames = [ - file for file in files - if isinstance(file, dict) and file.get("s", 0) > min_size - and file.get("s", 0) < max_size - and splitext(file.get("n", "").lower())[1] in WANTED_FORMATS - ] - - if not filenames: - return False - - needed_episodes = {episode.number: episode for episode in item.episodes if episode.state in [States.Indexed, States.Scraped, States.Unknown, States.Failed]} - one_season = len(item.parent.seasons) == 1 - - matched_files = {} - season_num = item.number - - for file in filenames: - parsed_file = parse(file["n"]) - if parsed_file and 
(season_num in parsed_file.seasons or one_season): - for ep_num in parsed_file.episodes: - if ep_num in needed_episodes: - matched_files[ep_num] = file["n"] - - if not matched_files: - return False - - if needed_episodes.keys() == matched_files.keys(): - for ep_num, filename in matched_files.items(): - ep = needed_episodes[ep_num] - ep.set("folder", item.active_stream.get("name")) - ep.set("alternative_folder", item.active_stream.get("alternative_name")) - ep.set("file", filename) - return True - return False - - def _is_wanted_show(self, files: list, item: Show) -> bool: - """Check if files are wanted for a show""" - if not isinstance(item, Show): - logger.error(f"Item is not a Show instance: {item.log_string}") - return False - - min_size = self.download_settings.episode_filesize_min * 1_000_000 - max_size = self.download_settings.episode_filesize_max * 1_000_000 if self.download_settings.episode_filesize_max != -1 else float("inf") - - filenames = [ - file for file in files - if isinstance(file, dict) and file.get("s", 0) > min_size - and file.get("s", 0) < max_size - and splitext(file.get("n", "").lower())[1] in WANTED_FORMATS - ] - - if not filenames: - return False - - needed_episodes = {} - acceptable_states = [States.Indexed, States.Scraped, States.Unknown, States.Failed] - - for season in item.seasons: - if season.state in acceptable_states and season.is_released: - needed_episode_numbers = {episode.number for episode in season.episodes if episode.state in acceptable_states and episode.is_released} - if needed_episode_numbers: - needed_episodes[season.number] = needed_episode_numbers - if not needed_episodes: - return False - - matched_files = {} - for file in filenames: - parsed_file = parse(file["n"]) - if parsed_file: - for season_number, episodes in needed_episodes.items(): - if season_number in parsed_file.seasons: - for episode_number in list(episodes): - if episode_number in parsed_file.episodes: - matched_files[(season_number, episode_number)] = 
file - episodes.remove(episode_number) - - if not matched_files: - return False - - all_found = all(len(episodes) == 0 for episodes in needed_episodes.values()) - - if all_found: - for (season_number, episode_number), file in matched_files.items(): - season = next(season for season in item.seasons if season.number == season_number) - episode = next(episode for episode in season.episodes if episode.number == episode_number) - episode.set("folder", item.active_stream.get("name")) - episode.set("alternative_folder", item.active_stream.get("alternative_name", None)) - episode.set("file", file["n"]) - return True - return False - - def _is_downloaded(self, item: MediaItem) -> bool: - """Check if item is already downloaded after checking if it was cached""" - hash_key = item.active_stream.get("hash", None) - if not hash_key: - logger.log("DEBRID", f"Item missing hash, skipping check: {item.log_string}") - return False - - logger.debug(f"Checking if torrent is already downloaded for item: {item.log_string}") - torrent = self.get_torrent(hash_key) - - if not torrent: - logger.debug(f"No matching torrent found for hash: {hash_key}") - return False - - if item.active_stream.get("id", None): - logger.debug(f"Item already has an active stream ID: {item.active_stream.get('id')}") - return True - - info = self.get_torrent_info(torrent.id) - if not info or not hasattr(info, "links"): - logger.debug(f"Failed to get torrent info for ID: {torrent.id}") - return False - - if not self._matches_item(info, item): - return False - - # Cache this as downloaded - logger.debug(f"Marking torrent as downloaded for hash: {torrent.hash}") - item.set("active_stream.id", torrent.id) - self.set_active_files(item) - logger.debug(f"Set active files for item: {item.log_string} with {len(item.active_stream.get('files', {}))} total files") - return True - - def _download_item(self, item: MediaItem): - """Download item from all-debrid.com""" - logger.debug(f"Starting download for item: 
{item.log_string}") - request_id = self.add_magnet(item) - logger.debug(f"Magnet added to All-Debrid, request ID: {request_id} for {item.log_string}") - item.set("active_stream.id", request_id) - self.set_active_files(item) - logger.debug(f"Active files set for item: {item.log_string} with {len(item.active_stream.get('files', {}))} total files") - time.sleep(0.5) - logger.debug(f"Item marked as downloaded: {item.log_string}") - - def set_active_files(self, item: MediaItem) -> None: - """Set active files for item from all-debrid.com""" - active_stream = item.get("active_stream") - if not active_stream or "id" not in active_stream: - logger.error(f"Invalid active stream data for item: {item.log_string}") - return - - info = self.get_torrent_info(active_stream["id"]) - magnet_info = info.data.magnets - if not info or not magnet_info or not magnet_info.filename: - logger.error(f"Failed to get torrent info for item: {item.log_string}") - return - - item.active_stream["alternative_name"] = magnet_info.filename - item.active_stream["name"] = magnet_info.filename - - if not item.folder or not item.alternative_folder: - item.set("folder", item.active_stream.get("name")) - item.set("alternative_folder", item.active_stream.get("alternative_name")) - - # Ensure that the folder and file attributes are set - if isinstance(item, (Movie, Episode)): - if not item.file: - for link in magnet_info.links: - if hasattr(link, "files"): - for file in link.files: - if isinstance(file, SimpleNamespace) and hasattr(file, "e"): - for subfile in file.e: - if isinstance(item, Movie) and self._is_wanted_movie(subfile, item) or isinstance(item, Episode) and self._is_wanted_episode(subfile, item): - item.set("file", subfile.n) - break - if not item.folder or not item.alternative_folder or not item.file: - logger.error(f"Missing folder or alternative_folder or file for item: {item.log_string}") - return - - if isinstance(item, Season) and item.folder: - for episode in item.episodes: - if 
def walk_alldebrid_files(files: list):
    """Yield (filename, filesize) for every file in All-Debrid's nested
    `files` structure, recursing into directories and discarding path info.

    A file entry carries an integer size under "s"; a directory entry has no
    parseable size and holds its children under "e".
    """
    dirs = []
    for f in files:
        try:
            size = int(f.get("s", ""))
            yield f.get("n", "UNKNOWN"), size
        except ValueError:
            # No integer size -> treat this entry as a directory.
            dirs.append(f)

    for d in dirs:
        # BUGFIX: the recursive generator was previously created and
        # discarded, so files inside subdirectories were never yielded.
        yield from walk_alldebrid_files(d.get("e", []))


def get(url, **params) -> dict:
    """GET an All-Debrid API endpoint and return the parsed response data.

    Adds the mandatory `agent` parameter and bearer-token header, and routes
    the call through both rate limiters (and the proxy, when enabled).
    """
    params["agent"] = AD_AGENT  # Add agent parameter per AllDebrid API requirement
    return request.get(
        url=f"{AD_BASE_URL}/{url}",
        params=params,
        additional_headers={
            "Authorization": f"Bearer {settings.settings.downloaders.all_debrid.api_key}"
        },
        response_type=dict,
        specific_rate_limiter=inner_rate_limit,
        overall_rate_limiter=overall_rate_limiter,
        proxies=settings.settings.downloaders.all_debrid.proxy_url
        if settings.settings.downloaders.all_debrid.proxy_enabled
        else None,
    ).data


def get_user() -> dict:
    """Return the /user endpoint payload for the configured account."""
    return get("user")


def get_instant_availability(infohashes: list[str]) -> list[dict]:
    """Return the availability dicts for `infohashes` ([] on failure)."""
    try:
        params = dict(
            (f"magnets[{i}]", infohash) for i, infohash in enumerate(infohashes)
        )
        data = get("magnet/instant", **params)
        magnets = data.get("data", {}).get("magnets", [])
    except Exception:
        # BUGFIX: previously the exception object itself was returned inside
        # the list, which crashed callers iterating with `m.get("hash")`.
        logger.warning("Failed to get instant availability.")
        magnets = []
    return magnets


def add_torrent(infohash: str) -> int:
    """Upload a magnet for `infohash`; return its All-Debrid id, or None on failure."""
    try:
        params = {"magnets[]": f"magnet:?xt=urn:btih:{infohash}"}
        torrent_id = get("magnet/upload", **params)["data"]["magnets"][0]["id"]
    except Exception:
        logger.warning(f"Failed to add torrent with infohash {infohash}")
        torrent_id = None
    return torrent_id


def get_status(id: str) -> dict:
    """Return the status dict for a single magnet, or {} on failure."""
    try:
        info = get("magnet/status", id=f"{id}")["data"]["magnets"]
        # BUGFIX: item access so a missing filename actually raises and we
        # take the failure path (`.get` silently returned None, defeating
        # the check the original comment described).
        info["filename"]
    except Exception:
        logger.warning(f"Failed to get info for torrent with id {id}")
        info = {}
    return info


def get_torrents() -> list[dict]:
    """Return every magnet on the account (empty list on failure)."""
    try:
        torrents = get("magnet/status")
        torrents = torrents.get("data", {}).get("magnets", [])
    except Exception:
        logger.warning("Failed to get torrents.")
        torrents = []
    return torrents


def delete_torrent(id: str):
    """Delete the magnet with the given id; failures are logged, not raised."""
    try:
        get("magnet/delete", id=id)
    except Exception:
        logger.warning(f"Failed to delete torrent with id {id}")
+ expiration_message = premium_days_left(expiration_datetime) if user_info.get("type", "") != "premium": logger.error("You are not a premium member.") @@ -97,7 +85,7 @@ def get_cached_containers(self, infohashes: list[str], needed_media: dict, break def all_files_valid(file_dict: dict) -> bool: return all( any( - file["filename"].endswith(f'.{ext}') and "sample" not in file["filename"].lower() + file["filename"].endswith(f".{ext}") and "sample" not in file["filename"].lower() for ext in VIDEO_EXTENSIONS ) for file in file_dict.values() @@ -158,7 +146,7 @@ def delete(url): def add_torrent(infohash: str) -> int: try: - id = post(f"torrents/addMagnet", data={"magnet": f"magnet:?xt=urn:btih:{infohash}"})["id"] + id = post("torrents/addMagnet", data={"magnet": f"magnet:?xt=urn:btih:{infohash}"})["id"] except: logger.warning(f"Failed to add torrent with infohash {infohash}") id = None @@ -174,7 +162,7 @@ def add_torrent_magnet(magnet: str) -> str: def select_files(id: str, files: list[str]): try: - post(f"torrents/selectFiles/{id}", data={"files": ','.join(files)}) + post(f"torrents/selectFiles/{id}", data={"files": ",".join(files)}) except: logger.warning(f"Failed to select files for torrent with id {id}") @@ -208,4 +196,4 @@ def delete_torrent(id): try: delete(f"torrents/delete/{id}") except: - logger.warning(f"Failed to delete torrent with id {id}") \ No newline at end of file + logger.warning(f"Failed to delete torrent with id {id}") diff --git a/src/program/downloaders/shared.py b/src/program/downloaders/shared.py index 1637d99b..70e5f967 100644 --- a/src/program/downloaders/shared.py +++ b/src/program/downloaders/shared.py @@ -4,6 +4,8 @@ from program.media.state import States from program.settings.manager import settings_manager +from datetime import datetime + DEFAULT_VIDEO_EXTENSIONS = ["mp4", "mkv", "avi"] ALLOWED_VIDEO_EXTENSIONS = ["mp4", "mkv", "avi", "mov", "wmv", "flv", "m4v", "webm", "mpg", "mpeg", "m2ts", "ts"] @@ -82,4 +84,19 @@ def 
def premium_days_left(expiration: datetime) -> str:
    """Convert an expiration date into a message showing the time remaining
    on the user's premium account.

    ``expiration`` must be a naive UTC datetime (callers strip tzinfo before
    calling — TODO confirm all call sites follow this).
    """
    time_left = expiration - datetime.utcnow()
    days_left = time_left.days
    # BUG FIX: the original computed divmod(time_left.seconds // 3600, 60),
    # i.e. divmod(hours, 60) -> (0, hours). Since hours < 24, hours_left was
    # always 0 and the "N hours and M minutes" branch was unreachable, so
    # every same-day expiry reported "expires soon". Compute hours/minutes
    # from the raw seconds instead.
    hours_left, remainder = divmod(time_left.seconds, 3600)
    minutes_left = remainder // 60

    if days_left > 0:
        expiration_message = f"Your account expires in {days_left} days."
    elif hours_left > 0:
        expiration_message = f"Your account expires in {hours_left} hours and {minutes_left} minutes."
    else:
        expiration_message = "Your account expires soon."
    return expiration_message
**params) + else: + return status_all(url, **params) + case _: + raise Exception("unmatched api call") + alldebrid.get = get + + alldebrid_settings = settings.settings.downloaders.all_debrid + alldebrid_settings.enabled = True + alldebrid_settings.api_key = "key" + + downloader = AllDebridDownloader() + assert downloader.initialized + yield downloader + + # tear down mock + alldebrid.get = get + + +## Downloader tests +def test_process_hashes(downloader): + hashes = downloader.process_hashes(["abc"], None, [False, True]) + assert len(hashes) == 1 + + +def test_download_cached(downloader): + torrent_id = downloader.download_cached({"infohash": "abc"}) + assert torrent_id == MAGNET_ID + + +def test_get_torrent_names(downloader): + names = downloader.get_torrent_names(123) + assert names == ("Ubuntu 24.04", "Ubuntu 24.04") + + +## API parsing tests +def test_get_instant_availability(instant): + alldebrid.get = instant + infohashes = [UBUNTU] + availability = get_instant_availability(infohashes) + assert len(availability[0].get("files", [])) == 2 + + +def test_get_instant_availability_unavailable(instant_unavailable): + alldebrid.get = instant_unavailable + infohashes = [UBUNTU] + availability = get_instant_availability(infohashes) + assert availability[0]["hash"] == UBUNTU + + +def test_add_torrent(upload): + alldebrid.get = upload + torrent_id = add_torrent(UBUNTU) + assert torrent_id == 251993753 + + +def test_add_torrent_cached(upload_ready): + alldebrid.get = upload_ready + torrent_id = add_torrent(UBUNTU) + assert torrent_id == 251993753 + + +def test_get_status(status): + alldebrid.get = status + torrent_status = get_status(251993753) + assert torrent_status["filename"] == "Ubuntu 24.04" + + +def test_get_status_unfinished(status_downloading): + alldebrid.get = status_downloading + torrent_status = get_status(251993753) + assert torrent_status["status"] == "Downloading" + + +def test_get_torrents(status_all): + alldebrid.get = status_all + torrents = 
get_torrents() + assert torrents[0]["status"] == "Ready" + + +def test_delete(delete): + alldebrid.get = delete + delete(123) + + +# Example requests - taken from real API calls +UBUNTU = "3648baf850d5930510c1f172b534200ebb5496e6" +MAGNET_ID = 251993753 +@pytest.fixture +def instant(): + """GET /magnet/instant?magnets[0]=infohash (torrent available)""" + with open("src/tests/test_data/alldebrid_magnet_instant.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def instant_unavailable(): + """GET /magnet/instant?magnets[0]=infohash (torrent unavailable)""" + with open("src/tests/test_data/alldebrid_magnet_instant_unavailable.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def upload(): + """GET /magnet/upload?magnets[]=infohash (torrent not ready yet)""" + with open("src/tests/test_data/alldebrid_magnet_upload_not_ready.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def upload_ready(): + """GET /magnet/upload?magnets[]=infohash (torrent ready)""" + with open("src/tests/test_data/alldebrid_magnet_upload_ready.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def status(): + """GET /magnet/status?id=123 (debrid links ready)""" + with open("src/tests/test_data/alldebrid_magnet_status_one_ready.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def status_downloading(): + """GET /magnet/status?id=123 (debrid links not ready yet)""" + with open("src/tests/test_data/alldebrid_magnet_status_one_downloading.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def status_all(): + """GET /magnet/status (gets a list of all links instead of a single object)""" + # The body is the same as a single item, but with all your magnets in a list. 
+ with open("src/tests/test_data/alldebrid_magnet_status_one_ready.json") as f: + body = json.load(f) + return lambda url, **params: {"status": "success", "data": {"magnets": [body["data"]["magnets"]]}} + +@pytest.fixture +def delete(): + """GET /delete""" + with open("src/tests/test_data/alldebrid_magnet_delete.json") as f: + body = json.load(f) + return lambda url, **params: body + diff --git a/src/tests/test_data/alldebrid_magnet_delete.json b/src/tests/test_data/alldebrid_magnet_delete.json new file mode 100644 index 00000000..8f3d657e --- /dev/null +++ b/src/tests/test_data/alldebrid_magnet_delete.json @@ -0,0 +1,6 @@ +{ + "status": "success", + "data": { + "message": "Magnet was successfully deleted" + } +} diff --git a/src/tests/test_data/alldebrid_magnet_instant.json b/src/tests/test_data/alldebrid_magnet_instant.json new file mode 100644 index 00000000..4730cdc0 --- /dev/null +++ b/src/tests/test_data/alldebrid_magnet_instant.json @@ -0,0 +1,16 @@ +{ + "status": "success", + "data": { + "magnets": [ + { + "magnet": "3648baf850d5930510c1f172b534200ebb5496e6", + "hash": "3648baf850d5930510c1f172b534200ebb5496e6", + "instant": true, + "files": [ + {"n": "ubuntu-24.04-desktop-amd64.iso", "s": 6114656256}, + {"n": "ubuntu-24.04-live-server-amd64.iso", "s": 2754981888} + ] + } + ] + } +} diff --git a/src/tests/test_data/alldebrid_magnet_instant_unavailable.json b/src/tests/test_data/alldebrid_magnet_instant_unavailable.json new file mode 100644 index 00000000..e553e258 --- /dev/null +++ b/src/tests/test_data/alldebrid_magnet_instant_unavailable.json @@ -0,0 +1,12 @@ + { + "status": "success", + "data": { + "magnets": [ + { + "magnet": "3648baf850d5930510c1f172b534200ebb5496e6", + "hash": "3648baf850d5930510c1f172b534200ebb5496e6", + "instant": false + } + ] + } +} diff --git a/src/tests/test_data/alldebrid_magnet_status_one_downloading.json b/src/tests/test_data/alldebrid_magnet_status_one_downloading.json new file mode 100644 index 00000000..016f6f9f --- 
/dev/null +++ b/src/tests/test_data/alldebrid_magnet_status_one_downloading.json @@ -0,0 +1,25 @@ +{ + "status": "success", + "data": { + "magnets": { + "id": 251993753, + "filename": "Ubuntu 24.04", + "size": 8869638144, + "hash": "3648baf850d5930510c1f172b534200ebb5496e6", + "status": "Downloading", + "statusCode": 1, + "downloaded": 165063971, + "uploaded": 0, + "seeders": 6, + "downloadSpeed": 4782727, + "processingPerc": 0, + "uploadSpeed": 0, + "uploadDate": 1727454272, + "completionDate": 0, + "links": [], + "type": "m", + "notified": false, + "version": 2 + } + } +} diff --git a/src/tests/test_data/alldebrid_magnet_status_one_ready.json b/src/tests/test_data/alldebrid_magnet_status_one_ready.json new file mode 100644 index 00000000..a7a0c621 --- /dev/null +++ b/src/tests/test_data/alldebrid_magnet_status_one_ready.json @@ -0,0 +1,40 @@ +{ + "status": "success", + "data": { + "magnets": { + "id": 251993753, + "filename": "Ubuntu 24.04", + "size": 8869638144, + "hash": "3648baf850d5930510c1f172b534200ebb5496e6", + "status": "Ready", + "statusCode": 4, + "downloaded": 8869638144, + "uploaded": 0, + "seeders": 0, + "downloadSpeed": 0, + "processingPerc": 0, + "uploadSpeed": 0, + "uploadDate": 1727454272, + "completionDate": 1727454803, + "links": [ + { + "filename": "ubuntu-24.04-desktop-amd64.iso", + "size": 6114656256, + "files": [{"n": "ubuntu-24.04-desktop-amd64.iso", "s": 6114656256}], + "link": "https://alldebrid.com/f/REDACTED" + }, + { + "filename": "ubuntu-24.04-live-server-amd64.iso", + "size": 2754981888, + "files": [ + {"n": "ubuntu-24.04-live-server-amd64.iso", "s": 2754981888} + ], + "link": "https://alldebrid.com/f/REDACTED" + } + ], + "type": "m", + "notified": false, + "version": 2 + } + } +} diff --git a/src/tests/test_data/alldebrid_magnet_upload_not_ready.json b/src/tests/test_data/alldebrid_magnet_upload_not_ready.json new file mode 100644 index 00000000..1f4ae8cc --- /dev/null +++ 
b/src/tests/test_data/alldebrid_magnet_upload_not_ready.json @@ -0,0 +1,16 @@ +{ + "status": "success", + "data": { + "magnets": [ + { + "magnet": "magnet:?xt=urn:btih:3648baf850d5930510c1f172b534200ebb5496e6", + "hash": "3648baf850d5930510c1f172b534200ebb5496e6", + "name": "noname", + "filename_original": "", + "size": 0, + "ready": false, + "id": 251993753 + } + ] + } +} diff --git a/src/tests/test_data/alldebrid_magnet_upload_ready.json b/src/tests/test_data/alldebrid_magnet_upload_ready.json new file mode 100644 index 00000000..c49b65c1 --- /dev/null +++ b/src/tests/test_data/alldebrid_magnet_upload_ready.json @@ -0,0 +1,16 @@ +{ + "status": "success", + "data": { + "magnets": [ + { + "magnet": "magnet:?xt=urn:btih:3648baf850d5930510c1f172b534200ebb5496e6", + "hash": "3648baf850d5930510c1f172b534200ebb5496e6", + "name": "Ubuntu 24.04", + "filename_original": "", + "size": 8869638144, + "ready": true, + "id": 251993753 + } + ] + } +}