feat: improved ui (rivenmedia#422)
* testing

* testing

* minor fixes

* feat: some frontend ui changes :)

* minor tweaks

* added movies and top movies section

* nearly completed homepage

* fix: fix settings and improvements to homepage

* chore: update app name to Riven and fix page header in library and onboarding

* feat: switch to vaul-svelte dependency for mobile ui and improvements to ui in general

* chore: update app name to Riven, improve UI consistency and changes to items endpoint

* feat: Add incomplete items to statistics page

* feat: Add services status to statistics page

feat: add lazy loading for images in statistics and home pages (rivenmedia#502)

fix: min/max filesize being returned undefined
fix: minor ui improvements (rivenmedia#503)

* fix: minor ui improvements

* chore: formatted files
fix: lower the z index and increase z index of header (rivenmedia#504)

feat: add top rated section (rivenmedia#505)

* feat: add top rated section

* chore: format files
fix: text color on light theme (rivenmedia#506)

chore(deps-dev): bump vitest from 1.6.0 to 2.0.1 in /frontend (rivenmedia#498)

Bumps [vitest](https://github.com/vitest-dev/vitest/tree/HEAD/packages/vitest) from 1.6.0 to 2.0.1.
- [Release notes](https://github.com/vitest-dev/vitest/releases)
- [Commits](https://github.com/vitest-dev/vitest/commits/v2.0.1/packages/vitest)

---
updated-dependencies:
- dependency-name: vitest
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Rate limiting for RD

- Change the requests models to accept an optional instance of the rate limiter class
- Implement rate limiter settings for RD, with a conservative 100 calls per 60 seconds as the default
- Ensure that, if enabled, the rate limiter is initialised within the RD class
- Ensure all RD calls (apart from ping) use the initialised rate limiter, if present, in requests

Add RD rate limit settings to general settings pane - frontend

Ignore Mac- and Rider-specific settings

Remove config from frontend - this is internal now, non-configurable

Remove config, and use it all the time

1 call per second

Split out the rate limiter into a separate file; also handle 429s, overall rate limiting, and endpoint-specific rate limiting
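
Purely as an illustration of the approach described above, here is a minimal sketch of a sliding-window rate limiter with both a blocking and a fail-fast mode. The class names RateLimiter and RateLimitExceeded match the imports visible in the diffs below (utils.ratelimiter), but the constructor signature, the acquire method, and the example limits are assumptions rather than the repository's actual implementation; 429 back-off handling is omitted.

import threading
import time


class RateLimitExceeded(Exception):
    """Raised when a limiter is configured to fail fast instead of blocking."""


class RateLimiter:
    """Sliding-window limiter: at most `max_calls` calls per `period` seconds."""

    def __init__(self, max_calls: int, period: float, raise_on_limit: bool = False):
        self.max_calls = max_calls
        self.period = period
        self.raise_on_limit = raise_on_limit
        self._calls: list[float] = []
        self._lock = threading.Lock()

    def acquire(self) -> None:
        while True:
            with self._lock:
                now = time.monotonic()
                # Drop timestamps that have fallen outside the window.
                self._calls = [t for t in self._calls if now - t < self.period]
                if len(self._calls) < self.max_calls:
                    self._calls.append(now)
                    return
                if self.raise_on_limit:
                    raise RateLimitExceeded("rate limit hit")
                wait = self.period - (now - self._calls[0])
            time.sleep(max(wait, 0.01))

    # Allow `with limiter:` around each outgoing call.
    def __enter__(self):
        self.acquire()
        return self

    def __exit__(self, *exc):
        return False


# Conservative defaults matching the commit message: an overall Real-Debrid
# budget of 100 calls per 60 seconds, plus a tighter per-endpoint limit.
overall_limiter = RateLimiter(max_calls=100, period=60)
torrents_limiter = RateLimiter(max_calls=1, period=1)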

Use get for ping, and pass rate limiters

Added a user-agent factory and randomiser in requests
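
Again only a sketch, assuming a simple pool-based design: a user-agent factory that picks a random agent string for each outgoing request. The function names and the agent strings are hypothetical and not taken from the repository.

import random

# A small pool of plausible desktop user agents; the real pool is an assumption.
_USER_AGENTS = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.4 Safari/605.1.15",
    "Mozilla/5.0 (X11; Linux x86_64; rv:127.0) Gecko/20100101 Firefox/127.0",
]


def user_agent_factory() -> str:
    """Return a randomly chosen user agent for the next outgoing request."""
    return random.choice(_USER_AGENTS)


def default_headers() -> dict[str, str]:
    """Headers merged into every request made by the shared helpers."""
    return {"User-Agent": user_agent_factory()}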

Updated Zilean scraper to the filtered endpoint

Only on movies. Aired-at won't be right for seasons; we need a better way to do this across the board
iPromKnight committed Jul 13, 2024
1 parent 4b8a7bf commit 317e204
Showing 68 changed files with 4,757 additions and 2,700 deletions.
8 changes: 7 additions & 1 deletion .gitignore
@@ -49,4 +49,10 @@ env/
venv/
ENV/
env.bak/
venv.bak/
venv.bak/

# Rider IDE
**/.idea/

# MacOs
**/.DS_Store
177 changes: 102 additions & 75 deletions backend/controllers/items.py
@@ -1,4 +1,5 @@
from datetime import datetime
from enum import Enum
from typing import List, Optional

import Levenshtein
@@ -29,41 +30,30 @@ async def get_states():
}


@router.get("/", summary="Retrieve Media Items", description="Fetch media items with optional filters and pagination.")
@router.get(
"",
summary="Retrieve Media Items",
description="Fetch media items with optional filters and pagination",
)
async def get_items(
request: Request,
fetch_all: Optional[bool] = False,
limit: Optional[int] = 20,
limit: Optional[int] = 50,
page: Optional[int] = 1,
search: Optional[str] = None,
type: Optional[str] = None,
state: Optional[str] = None,
type: Optional[str] = None
sort: Optional[str] = "desc",
search: Optional[str] = None,
):
"""
Fetch media items with optional filters and pagination.
Parameters:
- request: Request object
- fetch_all: Fetch all items without pagination (default: False)
- limit: Number of items per page (default: 20)
- page: Page number (default: 1)
- search: Search term to filter items by title, IMDb ID, or item ID
- state: Filter items by state
- type: Filter items by type (movie, show, season, episode)
Returns:
- JSON response with success status, items, pagination details, and total count
Examples:
- Fetch all items: /items?fetch_all=true
- Fetch first 10 items: /items?limit=10&page=1
- Search items by title: /items?search=inception
- Filter items by state: /items?state=completed
- Filter items by type: /items?type=movie
"""
if page < 1:
raise HTTPException(status_code=400, detail="Page number must be 1 or greater.")

if limit < 1:
raise HTTPException(status_code=400, detail="Limit must be 1 or greater.")

items = list(request.app.program.media_items._items.values())
total_items = len(items)

if search:
if search: # TODO: fix for search
search_lower = search.lower()
filtered_items = []
if search_lower.startswith("tt"):
@@ -75,25 +65,20 @@ async def get_items(
else:
for item in items:
if isinstance(item, MediaItem):
title_match = item.title and Levenshtein.distance(search_lower, item.title.lower()) <= 0.90
imdb_match = item.imdb_id and Levenshtein.distance(search_lower, item.imdb_id.lower()) <= 1
title_match = (
item.title
and Levenshtein.distance(search_lower, item.title.lower())
<= 0.90
)
imdb_match = (
item.imdb_id
and Levenshtein.distance(search_lower, item.imdb_id.lower())
<= 1
)
if title_match or imdb_match:
filtered_items.append(item)
items = filtered_items

if type:
type_lower = type.lower()
if type_lower == "movie":
items = list(request.app.program.media_items.movies.values())
elif type_lower == "show":
items = list(request.app.program.media_items.shows.values())
elif type_lower == "season":
items = list(request.app.program.media_items.seasons.values())
elif type_lower == "episode":
items = list(request.app.program.media_items.episodes.values())
else:
raise HTTPException(status_code=400, detail=f"Invalid type: {type}. Valid types are: ['movie', 'show', 'season', 'episode']")

if state:
filter_lower = state.lower()
filter_state = None
@@ -105,28 +90,56 @@ async def get_items(
items = [item for item in items if item.state == filter_state]
else:
valid_states = [state.name for state in States]
raise HTTPException(status_code=400, detail=f"Invalid filter state: {state}. Valid states are: {valid_states}")
raise HTTPException(
status_code=400,
detail=f"Invalid filter state: {state}. Valid states are: {valid_states}",
)

if not fetch_all:
if page < 1:
raise HTTPException(status_code=400, detail="Page number must be 1 or greater.")
if limit < 1:
raise HTTPException(status_code=400, detail="Limit must be 1 or greater.")

start = (page - 1) * limit
end = start + limit
items = items[start:end]
if type:
type_lower = type.lower()
if type_lower == "movie":
items = list(request.app.program.media_items.movies.values())
total_items = len(items)
elif type_lower == "show":
items = list(request.app.program.media_items.shows.values())
total_items = len(items)
elif type_lower == "season":
items = list(request.app.program.media_items.seasons.values())
total_items = len(items)
elif type_lower == "episode":
items = list(request.app.program.media_items.episodes.values())
total_items = len(items)
else:
raise HTTPException(
status_code=400,
detail=f"Invalid type: {type}. Valid types are: ['movie', 'show', 'season', 'episode']",
)

if (
sort and not search
): # we don't want to sort search results as they are already sorted by relevance
if sort.lower() == "asc":
items = sorted(items, key=lambda x: x.requested_at)
elif sort.lower() == "desc":
items = sorted(items, key=lambda x: x.requested_at, reverse=True)
else:
raise HTTPException(
status_code=400,
detail=f"Invalid sort: {sort}. Valid sorts are: ['asc', 'desc']",
)

total_count = len(items)
total_pages = (total_count + limit - 1) // limit
start = (page - 1) * limit
end = start + limit
items = items[start:end]
total_pages = (total_items + limit - 1) // limit

return {
"success": True,
"items": [item.to_dict() for item in items],
"page": page,
"limit": limit,
"total": total_count,
"total_pages": total_pages
"total_items": total_items,
"total_pages": total_pages,
}


@@ -144,10 +157,14 @@ async def get_extended_item_info(request: Request, item_id: str):

@router.post("/add/imdb/{imdb_id}")
@router.post("/add/imdb/")
async def add_items(request: Request, imdb_id: Optional[str] = None, imdb_ids: Optional[IMDbIDs] = None):
async def add_items(
request: Request, imdb_id: Optional[str] = None, imdb_ids: Optional[IMDbIDs] = None
):
if imdb_id:
imdb_ids = IMDbIDs(imdb_ids=[imdb_id])
elif not imdb_ids or not imdb_ids.imdb_ids or any(not id for id in imdb_ids.imdb_ids):
elif (
not imdb_ids or not imdb_ids.imdb_ids or any(not id for id in imdb_ids.imdb_ids)
):
raise HTTPException(status_code=400, detail="No IMDb ID(s) provided")

valid_ids = []
@@ -163,21 +180,21 @@ async def add_items(request: Request, imdb_id: Optional[str] = None, imdb_ids: O
for id in valid_ids:
item = MediaItem({"imdb_id": id, "requested_by": "riven"})
request.app.program.add_to_queue(item)

return {"success": True, "message": f"Added {len(valid_ids)} item(s) to the queue"}


@router.delete("/remove/")
async def remove_item(
request: Request,
item_id: Optional[str] = None,
imdb_id: Optional[str] = None
request: Request, item_id: Optional[str] = None, imdb_id: Optional[str] = None
):
if item_id:
item = request.app.program.media_items.get(item_id)
id_type = "ID"
elif imdb_id:
item = next((i for i in request.app.program.media_items if i.imdb_id == imdb_id), None)
item = next(
(i for i in request.app.program.media_items if i.imdb_id == imdb_id), None
)
id_type = "IMDb ID"
else:
raise HTTPException(status_code=400, detail="No item ID or IMDb ID provided")
@@ -186,7 +203,7 @@ async def remove_item(
logger.error(f"Item with {id_type} {item_id or imdb_id} not found")
return {
"success": False,
"message": f"Item with {id_type} {item_id or imdb_id} not found. No action taken."
"message": f"Item with {id_type} {item_id or imdb_id} not found. No action taken.",
}

try:
@@ -197,22 +214,33 @@ async def remove_item(
# Remove the symlinks associated with the item
symlinker = request.app.program.service[Symlinker]
symlinker.delete_item_symlinks(item)
logger.log("API", f"Removed symlink for item with {id_type} {item_id or imdb_id}")
logger.log(
"API", f"Removed symlink for item with {id_type} {item_id or imdb_id}"
)

# Save and reload the media items to ensure consistency
symlinker.save_and_reload_media_items(request.app.program.media_items)
logger.log("API", f"Saved and reloaded media items after removing item with {id_type} {item_id or imdb_id}")
logger.log(
"API",
f"Saved and reloaded media items after removing item with {id_type} {item_id or imdb_id}",
)

return {
"success": True,
"message": f"Successfully removed item with {id_type} {item_id or imdb_id}."
"message": f"Successfully removed item with {id_type} {item_id or imdb_id}.",
}
except Exception as e:
logger.error(f"Failed to remove item with {id_type} {item_id or imdb_id}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")


@router.get("/imdb/{imdb_id}")
async def get_imdb_info(request: Request, imdb_id: str, season: Optional[int] = None, episode: Optional[int] = None):
async def get_imdb_info(
request: Request,
imdb_id: str,
season: Optional[int] = None,
episode: Optional[int] = None,
):
"""
Get the item with the given IMDb ID.
If the season and episode are provided, get the item with the given season and episode.
@@ -232,18 +260,17 @@ async def get_imdb_info(request: Request, imdb_id: str, season: Optional[int] =

@router.get("/incomplete")
async def get_incomplete_items(request: Request):
if not hasattr(request.app, 'program') or not hasattr(request.app.program, 'media_items'):
if not hasattr(request.app, "program") or not hasattr(
request.app.program, "media_items"
):
logger.error("Program or media_items not found in the request app")
raise HTTPException(status_code=500, detail="Internal server error")

incomplete_items = request.app.program.media_items.get_incomplete_items()
if not incomplete_items:
return {
"success": True,
"incomplete_items": []
}
return {"success": True, "incomplete_items": []}

return {
"success": True,
"incomplete_items": [item.to_dict() for item in incomplete_items.values()]
"incomplete_items": [item.to_dict() for item in incomplete_items.values()],
}
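
For orientation, a hedged usage sketch of the reworked endpoint above, assuming the router is mounted under /items and the backend listens on localhost:8080 (both assumptions). The query parameters and response keys mirror the handler in this diff (type, sort, limit, page; items, total_items, total_pages).

import requests  # third-party HTTP client, assumed available

BASE_URL = "http://localhost:8080"  # hypothetical host/port for a local backend

# First page of movies, newest requests first (mirrors the new query parameters).
resp = requests.get(
    f"{BASE_URL}/items",
    params={"type": "movie", "sort": "desc", "limit": 50, "page": 1},
    timeout=10,
)
resp.raise_for_status()
payload = resp.json()

print(f"{payload['total_items']} items across {payload['total_pages']} pages")
for item in payload["items"]:
    print(item.get("title"), item.get("imdb_id"))
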
8 changes: 4 additions & 4 deletions backend/program/content/listrr.py
Expand Up @@ -49,11 +49,11 @@ def validate(self) -> bool:
return False
try:
response = ping("https://listrr.pro/", additional_headers=self.headers)
if not response.ok:
if not response.is_ok:
logger.error(
f"Listrr ping failed - Status Code: {response.status_code}, Reason: {response.reason}",
f"Listrr ping failed - Status Code: {response.status_code}, Reason: {response.response.reason}",
)
return response.ok
return response.is_ok
except Exception as e:
logger.error(f"Listrr ping exception: {e}")
return False
@@ -102,4 +102,4 @@ def _get_items_from_Listrr(self, content_type, content_lists) -> list[MediaItem]
logger.error(f"An error occurred: {e}")
break
page += 1
return list(unique_ids)
return list(unique_ids)
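
The switch from response.ok to response.is_ok, and from response.reason to response.response.reason, suggests the shared helpers in utils/request.py now return a wrapper around requests.Response rather than the raw object. The sketch below is only an inference from the call sites in this commit: the attribute names is_ok, status_code, data and the raw response under .response are taken from usage here, but the actual wrapper may look different.

import json
from types import SimpleNamespace

import requests


class ResponseObject:
    """Thin wrapper the helpers in utils/request.py appear to return (inferred shape)."""

    def __init__(self, response: requests.Response):
        self.response = response                 # raw requests.Response
        self.is_ok = response.ok                 # replaces the old `.ok` checks
        self.status_code = response.status_code
        self.data = self._parse(response)        # parsed JSON with attribute access

    @staticmethod
    def _parse(response: requests.Response):
        try:
            return json.loads(
                response.content, object_hook=lambda d: SimpleNamespace(**d)
            )
        except ValueError:
            return {}


def get(url: str, **kwargs) -> ResponseObject:
    """Hypothetical shared GET helper returning the wrapper."""
    return ResponseObject(requests.get(url, **kwargs))
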
7 changes: 4 additions & 3 deletions backend/program/content/mdblist.py
@@ -5,7 +5,8 @@
from program.media.item import MediaItem
from program.settings.manager import settings_manager
from utils.logger import logger
from utils.request import RateLimiter, RateLimitExceeded, get, ping
from utils.request import get, ping
from utils.ratelimiter import RateLimiter, RateLimitExceeded


class Mdblist:
@@ -33,7 +34,7 @@ def validate(self):
logger.error("Mdblist is enabled, but list is empty.")
return False
response = ping(f"https://mdblist.com/api/user?apikey={self.settings.api_key}")
if "Invalid API key!" in response.text:
if "Invalid API key!" in response.response.text:
logger.error("Mdblist api key is invalid.")
return False
return True
@@ -91,4 +92,4 @@ def list_items_by_url(url: str, api_key: str):
url = url if url.endswith("/") else f"{url}/"
url = url if url.endswith("json/") else f"{url}json/"
response = get(url, params={"apikey": api_key})
return response.data
return response.data
6 changes: 3 additions & 3 deletions backend/program/content/overseerr.py
@@ -40,10 +40,10 @@ def validate(self) -> bool:
)
if response.status_code >= 201:
logger.error(
f"Overseerr ping failed - Status Code: {response.status_code}, Reason: {response.reason}"
f"Overseerr ping failed - Status Code: {response.status_code}, Reason: {response.response.reason}"
)
return False
return response.ok
return response.is_ok
except (ConnectionError, RetryError, MaxRetryError, NewConnectionError) as e:
logger.error(f"Overseerr URL is not reachable, or it timed out")
return False
@@ -223,4 +223,4 @@ def mark_completed(mediaId: int) -> bool:
# 2 = PENDING,
# 3 = PROCESSING,
# 4 = PARTIALLY_AVAILABLE,
# 5 = AVAILABLE
# 5 = AVAILABLE
4 changes: 2 additions & 2 deletions backend/program/content/plex_watchlist.py
@@ -34,7 +34,7 @@ def validate(self):
for rss_url in self.settings.rss:
try:
response = ping(rss_url)
response.raise_for_status()
response.response.raise_for_status()
self.rss_enabled = True
return True
except HTTPError as e:
@@ -121,4 +121,4 @@ def _extract_imdb_ids(self, guids):
if guid.startswith("imdb://"):
imdb_id = guid.split("//")[-1]
if imdb_id:
yield imdb_id
yield imdb_id