Skip to content

Commit

Permalink
feat: improved ui (rivenmedia#422) (rivenmedia#512)
Browse files Browse the repository at this point in the history
* testing

* testing

* minor fixes

* feat: some frontend ui changes :)

* minor tweaks

* added movies and top movies section

* nearly completed homepage

* fix: fix settings and improvements to homepage

* chore: update app name to Riven and fix page header in library and onboarding

* feat: switch to vaul-svelte dependency for mobile ui and improvements to ui in general

* chore: update app name to Riven, improve UI consistency and changes to items endpoint

* feat: Add incomplete items to statistics page

* feat: Add services status to statistics page

feat: add lazy loading for images in statistics and home pages (rivenmedia#502)

fix: min/max filesize being returned undefined
fix: minor ui improvements (rivenmedia#503)

* fix: minor ui improvements

* chore: formatted files
fix: lower the z index and increase z index of header (rivenmedia#504)

feat: add top rated section (rivenmedia#505)

* feat: add top rated section

* chore: format files
fix: text color on light theme (rivenmedia#506)

chore(deps-dev): bump vitest from 1.6.0 to 2.0.1 in /frontend (rivenmedia#498)

Bumps [vitest](https://github.com/vitest-dev/vitest/tree/HEAD/packages/vitest) from 1.6.0 to 2.0.1.
- [Release notes](https://github.com/vitest-dev/vitest/releases)
- [Commits](https://github.com/vitest-dev/vitest/commits/v2.0.1/packages/vitest)

---
updated-dependencies:
- dependency-name: vitest
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Rate limiting for RD

- Change requests models to take through an optional instance of the rate limiter class
- Implement rate limiter settings for RD, a Conservative 100 calls per 60 seconds as default
- Ensure if enabled the rate limiter is initialised within the RD class
- Ensure all Rd calls (apart from ping) use the initialised rate limiter if present in requests

Add RD rate limit settings to general settings pane - frontend

Ignore Mac- and Rider-specific settings

remove config from frontend - this is internal now, non-configurable

remove config, and use it all the time

1 per second

Split the rate limiter out into a separate file; also handle 429 responses, overall rate limiting, and endpoint-specific rate limiting

Use get for ping, and pass rate limiters

Added a user agent factory, and randomiser in requests

Updated zilean scraper to filtered endpoint

Only on movies. Aired-at won't be right for seasons; we need a better way to do this across the board
  • Loading branch information
iPromKnight committed Jul 22, 2024
1 parent 7fc78b5 commit 8824154
Show file tree
Hide file tree
Showing 27 changed files with 318 additions and 217 deletions.
8 changes: 7 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -49,4 +49,10 @@ env/
venv/
ENV/
env.bak/
venv.bak/
venv.bak/

# Rider IDE
**/.idea/

# MacOs
**/.DS_Store
8 changes: 4 additions & 4 deletions backend/program/content/listrr.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,11 +49,11 @@ def validate(self) -> bool:
return False
try:
response = ping("https://listrr.pro/", additional_headers=self.headers)
if not response.ok:
if not response.is_ok:
logger.error(
f"Listrr ping failed - Status Code: {response.status_code}, Reason: {response.reason}",
f"Listrr ping failed - Status Code: {response.status_code}, Reason: {response.response.reason}",
)
return response.ok
return response.is_ok
except Exception as e:
logger.error(f"Listrr ping exception: {e}")
return False
Expand Down Expand Up @@ -102,4 +102,4 @@ def _get_items_from_Listrr(self, content_type, content_lists) -> list[MediaItem]
logger.error(f"An error occurred: {e}")
break
page += 1
return list(unique_ids)
return list(unique_ids)
7 changes: 4 additions & 3 deletions backend/program/content/mdblist.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
from program.media.item import MediaItem
from program.settings.manager import settings_manager
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded, get, ping
from utils.request import get, ping
from utils.ratelimiter import RateLimiter, RateLimitExceeded


class Mdblist:
Expand Down Expand Up @@ -33,7 +34,7 @@ def validate(self):
logger.error("Mdblist is enabled, but list is empty.")
return False
response = ping(f"https://mdblist.com/api/user?apikey={self.settings.api_key}")
if "Invalid API key!" in response.text:
if "Invalid API key!" in response.response.text:
logger.error("Mdblist api key is invalid.")
return False
return True
Expand Down Expand Up @@ -91,4 +92,4 @@ def list_items_by_url(url: str, api_key: str):
url = url if url.endswith("/") else f"{url}/"
url = url if url.endswith("json/") else f"{url}json/"
response = get(url, params={"apikey": api_key})
return response.data
return response.data
6 changes: 3 additions & 3 deletions backend/program/content/overseerr.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,10 +40,10 @@ def validate(self) -> bool:
)
if response.status_code >= 201:
logger.error(
f"Overseerr ping failed - Status Code: {response.status_code}, Reason: {response.reason}"
f"Overseerr ping failed - Status Code: {response.status_code}, Reason: {response.response.reason}"
)
return False
return response.ok
return response.is_ok
except (ConnectionError, RetryError, MaxRetryError, NewConnectionError) as e:
logger.error(f"Overseerr URL is not reachable, or it timed out")
return False
Expand Down Expand Up @@ -223,4 +223,4 @@ def mark_completed(mediaId: int) -> bool:
# 2 = PENDING,
# 3 = PROCESSING,
# 4 = PARTIALLY_AVAILABLE,
# 5 = AVAILABLE
# 5 = AVAILABLE
4 changes: 2 additions & 2 deletions backend/program/content/plex_watchlist.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def validate(self):
for rss_url in self.settings.rss:
try:
response = ping(rss_url)
response.raise_for_status()
response.response.raise_for_status()
self.rss_enabled = True
return True
except HTTPError as e:
Expand Down Expand Up @@ -121,4 +121,4 @@ def _extract_imdb_ids(self, guids):
if guid.startswith("imdb://"):
imdb_id = guid.split("//")[-1]
if imdb_id:
yield imdb_id
yield imdb_id
5 changes: 3 additions & 2 deletions backend/program/content/trakt.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,8 @@
from program.settings.manager import settings_manager
from requests import RequestException
from utils.logger import logger
from utils.request import RateLimiter, get, post
from utils.request import get, post
from utils.ratelimiter import RateLimiter


class TraktContent:
Expand Down Expand Up @@ -359,4 +360,4 @@ def _resolve_short_url(short_url) -> str or None:
patterns: dict[str, re.Pattern] = {
"user_list": re.compile(r'https://trakt.tv/users/([^/]+)/lists/([^/]+)'),
"short_list": re.compile(r'https://trakt.tv/lists/\d+')
}
}
35 changes: 25 additions & 10 deletions backend/program/downloaders/realdebrid.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from RTN.patterns import extract_episodes
from utils.logger import logger
from utils.request import get, ping, post
from utils.ratelimiter import RateLimiter

WANTED_FORMATS = {".mkv", ".mp4", ".avi"}
RD_BASE_URL = "https://api.real-debrid.com/rest/1.0"
Expand All @@ -26,11 +27,14 @@ class Debrid:
"""Real-Debrid API Wrapper"""

def __init__(self, hash_cache):
self.rate_limiter = None
self.key = "realdebrid"
self.settings = settings_manager.settings.downloaders.real_debrid
self.download_settings = settings_manager.settings.downloaders
self.auth_headers = {"Authorization": f"Bearer {self.settings.api_key}"}
self.proxy = self.settings.proxy_url if self.settings.proxy_enabled else None
self.torrents_rate_limiter = RateLimiter(1, 1)
self.overall_rate_limiter = RateLimiter(60, 60)
self.initialized = self.validate()
if not self.initialized:
return
Expand Down Expand Up @@ -61,9 +65,13 @@ def validate(self) -> bool:
logger.error("Proxy is enabled but no proxy URL is provided.")
return False
try:
response = ping(f"{RD_BASE_URL}/user", additional_headers=self.auth_headers, proxies=self.proxy)
if response.ok:
user_info = response.json()
response = ping(
f"{RD_BASE_URL}/user",
additional_headers=self.auth_headers,
proxies=self.proxy,
overall_rate_limiter=self.overall_rate_limiter)
if response.is_ok:
user_info = response.response.json()
expiration = user_info.get("expiration", "")
expiration_datetime = datetime.fromisoformat(expiration.replace('Z', '+00:00')).replace(tzinfo=None)
time_left = expiration_datetime - datetime.utcnow().replace(tzinfo=None)
Expand Down Expand Up @@ -167,7 +175,7 @@ def _chunked(lst: List, n: int) -> Generator[List, None, None]:
for stream_chunk in _chunked(filtered_streams, 5):
streams = "/".join(stream_chunk)
try:
response = get(f"{RD_BASE_URL}/torrents/instantAvailability/{streams}/", additional_headers=self.auth_headers, proxies=self.proxy, response_type=dict)
response = get(f"{RD_BASE_URL}/torrents/instantAvailability/{streams}/", additional_headers=self.auth_headers, proxies=self.proxy, response_type=dict, specific_rate_limiter=self.torrents_rate_limiter, overall_rate_limiter=self.overall_rate_limiter)
if response.is_ok and self._evaluate_stream_response(response.data, processed_stream_hashes, item):
return True
except Exception as e:
Expand Down Expand Up @@ -552,7 +560,9 @@ def add_magnet(self, item: MediaItem) -> str:
f"{RD_BASE_URL}/torrents/addMagnet",
{"magnet": f"magnet:?xt=urn:btih:{hash}&dn=&tr="},
additional_headers=self.auth_headers,
proxies=self.proxy
proxies=self.proxy,
specific_rate_limiter=self.torrents_rate_limiter,
overall_rate_limiter=self.overall_rate_limiter
)
if response.is_ok:
return response.data.id
Expand All @@ -571,7 +581,9 @@ def get_torrent_info(self, request_id: str) -> dict:
response = get(
f"{RD_BASE_URL}/torrents/info/{request_id}",
additional_headers=self.auth_headers,
proxies=self.proxy
proxies=self.proxy,
specific_rate_limiter=self.torrents_rate_limiter,
overall_rate_limiter=self.overall_rate_limiter
)
if response.is_ok:
return response.data
Expand All @@ -594,21 +606,24 @@ def select_files(self, request_id: str, item: MediaItem) -> bool:
f"{RD_BASE_URL}/torrents/selectFiles/{request_id}",
{"files": ",".join(files.keys())},
additional_headers=self.auth_headers,
proxies=self.proxy
proxies=self.proxy,
specific_rate_limiter=self.torrents_rate_limiter,
overall_rate_limiter=self.overall_rate_limiter
)
return response.is_ok
except Exception as e:
logger.error(f"Error selecting files for {item.log_string}: {e}")
return False


def get_torrents(self, limit: int) -> dict[str, SimpleNamespace]:
"""Get torrents from real-debrid.com"""
try:
response = get(
f"{RD_BASE_URL}/torrents?limit={str(limit)}",
additional_headers=self.auth_headers,
proxies=self.proxy
proxies=self.proxy,
specific_rate_limiter=self.torrents_rate_limiter,
overall_rate_limiter=self.overall_rate_limiter
)
if response.is_ok and response.data:
# Example response.data:
Expand Down Expand Up @@ -686,4 +701,4 @@ def check_season(season):
return True

logger.debug(f"No matching item found for {item.log_string}")
return False
return False
5 changes: 3 additions & 2 deletions backend/program/scrapers/annatar.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
from requests import ConnectTimeout, ReadTimeout
from requests.exceptions import RequestException
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded, get
from utils.request import get
from utils.ratelimiter import RateLimiter, RateLimitExceeded


class Annatar:
Expand Down Expand Up @@ -129,4 +130,4 @@ def api_scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]:

torrents[stream.hash] = stream.title

return torrents, len(response.data.media)
return torrents, len(response.data.media)
4 changes: 2 additions & 2 deletions backend/program/scrapers/jackett.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from pydantic import BaseModel
from requests import HTTPError, ReadTimeout, RequestException, Timeout
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded
from utils.ratelimiter import RateLimiter, RateLimitExceeded


class JackettIndexer(BaseModel):
Expand Down Expand Up @@ -281,4 +281,4 @@ def _log_indexers(self) -> None:
if not indexer.movie_search_capabilities:
logger.debug(f"Movie search not available for {indexer.title}")
if not indexer.tv_search_capabilities:
logger.debug(f"TV search not available for {indexer.title}")
logger.debug(f"TV search not available for {indexer.title}")
7 changes: 4 additions & 3 deletions backend/program/scrapers/knightcrawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@
from requests import ConnectTimeout, ReadTimeout
from requests.exceptions import RequestException
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded, get, ping
from utils.request import get, ping
from utils.ratelimiter import RateLimiter, RateLimitExceeded


class Knightcrawler:
Expand Down Expand Up @@ -40,7 +41,7 @@ def validate(self) -> bool:
try:
url = f"{self.settings.url}/{self.settings.filter}/manifest.json"
response = ping(url=url, timeout=self.timeout)
if response.ok:
if response.is_ok:
return True
except Exception as e:
logger.error(f"Knightcrawler failed to initialize: {e}", )
Expand Down Expand Up @@ -114,4 +115,4 @@ def api_scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]:

torrents[stream.infoHash] = raw_title

return torrents, len(response.data.streams)
return torrents, len(response.data.streams)
7 changes: 4 additions & 3 deletions backend/program/scrapers/mediafusion.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@
from requests import ConnectTimeout, ReadTimeout
from requests.exceptions import RequestException
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded, get, ping
from utils.request import get, ping
from utils.ratelimiter import RateLimiter, RateLimitExceeded


class Mediafusion:
Expand Down Expand Up @@ -87,7 +88,7 @@ def validate(self) -> bool:
try:
url = f"{self.settings.url}/manifest.json"
response = ping(url=url, timeout=self.timeout)
return response.ok
return response.is_ok
except Exception as e:
logger.error(f"Mediafusion failed to initialize: {e}")
return False
Expand Down Expand Up @@ -151,4 +152,4 @@ def api_scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]:

torrents[info_hash] = raw_title

return torrents, len(response.data.streams)
return torrents, len(response.data.streams)
5 changes: 3 additions & 2 deletions backend/program/scrapers/orionoid.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@
from requests import ConnectTimeout, ReadTimeout
from requests.exceptions import RequestException
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded, get
from utils.request import get
from utils.ratelimiter import RateLimiter, RateLimitExceeded

KEY_APP = "D3CH6HMX9KD9EMD68RXRCDUNBDJV5HRR"

Expand Down Expand Up @@ -188,4 +189,4 @@ def api_scrape(self, item: MediaItem) -> tuple[Dict, int]:

torrents[stream.file.hash] = stream.file.name

return torrents, len(response.data.data.streams)
return torrents, len(response.data.data.streams)
4 changes: 2 additions & 2 deletions backend/program/scrapers/prowlarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from pydantic import BaseModel
from requests import HTTPError, ReadTimeout, RequestException, Timeout
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded
from utils.ratelimiter import RateLimiter, RateLimitExceeded


class ProwlarrIndexer(BaseModel):
Expand Down Expand Up @@ -284,4 +284,4 @@ def _log_indexers(self) -> None:
if not indexer.movie_search_capabilities:
logger.debug(f"Movie search not available for {indexer.title}")
if not indexer.tv_search_capabilities:
logger.debug(f"TV search not available for {indexer.title}")
logger.debug(f"TV search not available for {indexer.title}")
7 changes: 4 additions & 3 deletions backend/program/scrapers/torbox.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@
from RTN import RTN, Torrent, sort_torrents
from RTN.exceptions import GarbageTorrent
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded, get, ping
from utils.request import get, ping
from utils.ratelimiter import RateLimiter, RateLimitExceeded


class TorBoxScraper:
Expand Down Expand Up @@ -38,7 +39,7 @@ def validate(self) -> bool:

try:
response = ping(f"{self.base_url}/torrents/imdb:tt0944947?metadata=false&season=1&episode=1", timeout=self.timeout)
return response.ok
return response.is_ok
except Exception as e:
logger.exception(f"Error validating TorBox Scraper: {e}")
return False
Expand Down Expand Up @@ -118,4 +119,4 @@ def api_scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]:

torrents[info_hash] = raw_title

return torrents, len(response.data.data.torrents)
return torrents, len(response.data.data.torrents)
7 changes: 4 additions & 3 deletions backend/program/scrapers/torrentio.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@
from requests import ConnectTimeout, ReadTimeout
from requests.exceptions import RequestException
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded, get, ping
from utils.request import get, ping
from utils.ratelimiter import RateLimiter, RateLimitExceeded


class Torrentio:
Expand Down Expand Up @@ -42,7 +43,7 @@ def validate(self) -> bool:
try:
url = f"{self.settings.url}/{self.settings.filter}/manifest.json"
response = ping(url=url, timeout=10)
if response.ok:
if response.is_ok:
return True
except Exception as e:
logger.error(f"Torrentio failed to initialize: {e}", )
Expand Down Expand Up @@ -130,4 +131,4 @@ def api_scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]:
raw_title = stream_title.split("\n")[-1].split("/")[-1] if isinstance(item, Episode) else stream_title.split("\n")[0]
torrents[stream.infoHash] = raw_title

return torrents, len(response.data.streams)
return torrents, len(response.data.streams)
Loading

0 comments on commit 8824154

Please sign in to comment.