Skip to content

Commit

Permalink
chore: additional state transition fixes, add logging
Browse files Browse the repository at this point in the history
  • Loading branch information
Gaisberg authored and Gaisberg committed Aug 2, 2024
1 parent 755a1bf commit 869ff7b
Show file tree
Hide file tree
Showing 4 changed files with 19 additions and 9 deletions.
2 changes: 0 additions & 2 deletions src/controllers/items.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,8 +187,6 @@ async def retry_items(
for item in items:
request.app.program._remove_from_running_items(item)
request.app.program.add_to_queue(item)
item.store_state()
session.commit()

return {"success": True, "message": f"Retried items with id {ids}"}

Expand Down
3 changes: 3 additions & 0 deletions src/controllers/ws.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import json
from loguru import logger
from fastapi import APIRouter, WebSocket, WebSocketDisconnect

router = APIRouter(
Expand All @@ -12,10 +13,12 @@ def __init__(self):

async def connect(self, websocket: WebSocket):
    """Finish the WebSocket handshake, start tracking the socket, and
    immediately push a health frame so the frontend knows we are alive."""
    # Handshake must complete before anything can be sent.
    await websocket.accept()
    logger.debug("Frontend connected!")
    # Register the socket for later broadcasts / disconnect bookkeeping.
    self.active_connections.append(websocket)
    # Initial status frame consumed by the frontend on connect.
    health_frame = {"type": "health", "status": "running"}
    await websocket.send_json(health_frame)

def disconnect(self, websocket: WebSocket):
    """Log and deregister a WebSocket connection.

    Idempotent: if the socket was already removed (e.g. both an error
    handler and the WebSocketDisconnect path call this for the same
    socket), removal is a no-op instead of raising ValueError.
    """
    logger.debug("Frontend disconnected!")
    # list.remove raises ValueError when the element is absent; guard so a
    # double-disconnect cannot crash the server's cleanup path.
    if websocket in self.active_connections:
        self.active_connections.remove(websocket)

async def send_personal_message(self, message: str, websocket: WebSocket):
Expand Down
13 changes: 11 additions & 2 deletions src/program/downloaders/realdebrid.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def run(self, item: MediaItem) -> bool:
file_dict = self.is_cached(item, [stream.infohash], True)
if file_dict:
logger.log(
"DEBRID", f"Item hash cached containers, proceeding with: {item.log_string}"
"DEBRID", f"Item has cached containers, proceeding with: {item.log_string}"
)
self.download(item, stream.infohash, file_dict[stream.infohash])
return_value = True
Expand Down Expand Up @@ -165,11 +165,20 @@ def download(self, item: MediaItem, stream: str, container) -> None:

def download_cached(self, item: MediaItem, stream: str) -> None:
""" I assume that the item has been checked for cached files before calling this function """
added_magnet = False
torrent_id = self.torrent_is_downloaded(item, stream)
if not torrent_id:
torrent_id = self.add_magnet(stream)
time.sleep(1)
added_magnet = True
info = self.get_torrent_info(torrent_id)
files = [{"filename": Path(file["path"]).name, "filesize": file["bytes"], 'id': file["id"]} for file in info["files"]]
container = self.finder.find_required_files(item, files)
self.download(item, stream, container)
if added_magnet:
self.select_files(torrent_id, container)
self.set_active_files(item, torrent_id, container=container)
self.active_stream = stream
logger.log("DEBUG", f"Downloaded {item.log_string}")

def torrent_is_downloaded(self, item, hash_key) -> int:
"""Check if item is already downloaded after checking if it was cached
Expand Down
10 changes: 5 additions & 5 deletions src/program/media/item.py
Original file line number Diff line number Diff line change
Expand Up @@ -361,17 +361,17 @@ def get_season_index_by_id(self, item_id):
def _determine_state(self):
if all(season.state == States.Completed for season in self.seasons):
return States.Completed
if any(
season.state in (States.Completed, States.PartiallyCompleted)
for season in self.seasons
):
return States.PartiallyCompleted
if all(season.state == States.Symlinked for season in self.seasons):
return States.Symlinked
if all(season.state == States.Downloaded for season in self.seasons):
return States.Downloaded
if self.is_scraped():
return States.Scraped
if any(
season.state in (States.Completed, States.PartiallyCompleted)
for season in self.seasons
):
return States.PartiallyCompleted
if any(season.state == States.Indexed for season in self.seasons):
return States.Indexed
if any(season.state == States.Requested for season in self.seasons):
Expand Down

0 comments on commit 869ff7b

Please sign in to comment.