Add AniDex torrent provider #2700

Merged: 4 commits merged on May 7, 2017
Changes from 3 commits
4 changes: 3 additions & 1 deletion medusa/providers/__init__.py
@@ -26,6 +26,7 @@
from .torrent import (
abnormal,
alpharatio,
anidex,
animebytes,
animetorrents,
bitcannon,
@@ -41,6 +42,7 @@
hdbits,
hdspace,
hdtorrents,
horriblesubs,
hounddawgs,
iptorrents,
limetorrents,
@@ -77,7 +79,7 @@
'alpharatio', 'sdbits', 'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'xthor', 'abnormal', 'scenetime',
'nebulance', 'tvchaosuk', 'torrentproject', 'extratorrent', 'bitcannon', 'torrentz2', 'pretome', 'gftracker',
'hdspace', 'newpct', 'elitetorrent', 'danishbits', 'hd4free', 'limetorrents', 'norbits', 'anizb',
'bithdtv', 'zooqle', 'animebytes', 'animetorrents'
'bithdtv', 'zooqle', 'animebytes', 'animetorrents', 'horriblesubs', 'anidex'
]


4 changes: 3 additions & 1 deletion medusa/providers/torrent/__init__.py
@@ -3,6 +3,7 @@
from .html import (
abnormal,
alpharatio,
anidex,
animebytes,
animetorrents,
bithdtv,
@@ -13,6 +14,7 @@
gftracker,
hdspace,
hdtorrents,
horriblesubs,
hounddawgs,
iptorrents,
limetorrents,
@@ -63,5 +65,5 @@
'newpct', 'pretome', 'sdbits', 'scc', 'scenetime', 'speedcd', 'thepiratebay', 'tntvillage', 'tokyotoshokan',
'torrentbytes', 'torrentleech', 'nebulance', 'tvchaosuk', 'xthor', 'zooqle', 'bitcannon', 'btn', 'hd4free',
'hdbits', 'norbits', 'rarbg', 't411', 'torrentday', 'torrentproject', 'nyaatorrents', 'rsstorrent', 'shazbat',
'torrentz2', 'animetorrents'
'torrentz2', 'animetorrents', 'horriblesubs', 'anidex'
]
163 changes: 163 additions & 0 deletions medusa/providers/torrent/html/anidex.py
@@ -0,0 +1,163 @@
# coding=utf-8

"""Provider code for AniDex."""

from __future__ import unicode_literals

import logging
import traceback

from dateutil import parser

from medusa import tv
from medusa.bs4_parser import BS4Parser
from medusa.helper.common import convert_size, try_int
from medusa.logger.adapters.style import BraceAdapter
from medusa.providers.torrent.torrent_provider import TorrentProvider

from requests.compat import urljoin

log = BraceAdapter(logging.getLogger(__name__))
log.logger.addHandler(logging.NullHandler())


class AniDexProvider(TorrentProvider):
    """AniDex Torrent provider."""

    def __init__(self):
        """Initialize the class."""
        super(self.__class__, self).__init__('AniDex')

        # Credentials
        self.public = True

        # URLs
        self.url = 'https://anidex.info'
        self.urls = {
            'search': urljoin(self.url, '/ajax/page.ajax.php'),
        }

        # Miscellaneous Options
        self.headers = {
            'X-Requested-With': 'XMLHttpRequest',
        }

        # Torrent Stats
        self.minseed = None
        self.minleech = None

        # Cache
        self.cache = tv.Cache(self, min_time=20)

    def search(self, search_strings, age=0, ep_obj=None):
        """
        Search a provider and parse the results.

        :param search_strings: A dict with mode (key) and the search value (value)
        :param age: Not used
        :param ep_obj: Not used
        :returns: A list of search results (structure)
        """
        results = []

        search_params = {
            'page': 'torrents',
            'category': 0,
            'filename': '',
            'limit': 50,
            'offset': 0,
        }

        for mode in search_strings:
            log.debug('Search mode: {0}', mode)

            for search_string in search_strings[mode]:

                if mode != 'RSS':
                    log.debug('Search string: {search}',
                              {'search': search_string})

                search_params.update({'filename': '{0}'.format(search_string)})

                response = self.get_url(self.urls['search'], params=search_params, returns='response')
                if not response or not response.text:
                    log.debug('No data returned from provider')
                    continue

                results += self.parse(response.text, mode)

        return results

    def parse(self, data, mode):
        """
        Parse search results for items.

        :param data: The raw response from a search
        :param mode: The current mode used to search, e.g. RSS
        :return: A list of items found
        """
        items = []

        with BS4Parser(data, 'html5lib') as html:
            table_header = html.find('thead')

            # Continue only if at least one release is found
            if not table_header:
                log.debug('Data returned from provider does not contain any torrents')
                return items

            table_spans = table_header.find_all('span')
            # Skip 'Likes' to have the same amount of cells and labels
            labels = [label.get('title') for label in table_spans if label.get('title') != 'Likes']

            torrent_rows = html.find('tbody').find_all('tr')
            for row in torrent_rows:
                cells = row.find_all('td')

                try:
                    title = cells[labels.index('Filename')].span.get_text()
                    download_url = cells[labels.index('Torrent')].a.get('href')
                    if not all([title, download_url]):
                        continue

                    download_url = urljoin(self.url, download_url)

                    # Cast to int so the minimum-seeders comparison below compares numbers
                    seeders = try_int(cells[labels.index('Seeders')].get_text())
                    leechers = try_int(cells[labels.index('Leechers')].get_text())

                    # Filter unseeded torrent
                    if seeders < min(self.minseed, 1):
                        if mode != 'RSS':
                            log.debug("Discarding torrent because it doesn't meet the"
                                      " minimum seeders: {0}. Seeders: {1}",
                                      title, seeders)
                        continue

                    torrent_size = cells[labels.index('File size')].get_text()
                    size = convert_size(torrent_size) or -1

                    date = cells[labels.index('Age')].get('title')
                    pubdate = parser.parse(date)

                    item = {
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers,
                        'pubdate': pubdate,
                    }
                    if mode != 'RSS':
                        log.debug('Found result: {0} with {1} seeders and {2} leechers',
                                  title, seeders, leechers)

                    items.append(item)
                except (AttributeError, TypeError, KeyError, ValueError, IndexError):
                    log.error('Failed parsing provider. Traceback: {0!r}',
                              traceback.format_exc())

Contributor
I prefer to use log.exception() in these situations. @labrys, what do you think?

Contributor
Agreed. It's much cleaner to do log.exception('Failed parsing provider').

Contributor (Author)
It's the same for me, but we should decide what to use. I saw @labrys still replacing it with log.error?

Contributor
For my replacements it's mostly just mass regex replaces to deprecate the old brace message format so we can remove it; I'm not spending much effort on the content of the messages.

        return items


provider = AniDexProvider()
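A side note on the review thread above: the snippet below is a minimal sketch, not part of the PR, showing the difference the reviewers are discussing. It uses plain stdlib logging rather than Medusa's BraceAdapter; log.exception() logs at ERROR level and appends the active traceback on its own, so the hand-built traceback.format_exc() argument becomes unnecessary.

import logging
import traceback

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('example')


def parse(raw):
    try:
        return int(raw)
    except ValueError:
        # Pattern used in the diff above: format the traceback by hand.
        log.error('Failed parsing provider. Traceback: %r', traceback.format_exc())
        # Reviewers' suggestion: let logging attach the traceback itself.
        log.exception('Failed parsing provider')
        return None


parse('not-a-number')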
Binary file added static/images/providers/anidex.png
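For anyone who wants to poke at the provider's endpoint by hand, here is a standalone sketch assembled only from the URL, parameters, and header visible in the diff. The search string is made up, and it assumes anidex.info still answers this AJAX page with the HTML fragment that parse() expects.

import requests
from requests.compat import urljoin

BASE_URL = 'https://anidex.info'
SEARCH_URL = urljoin(BASE_URL, '/ajax/page.ajax.php')

params = {
    'page': 'torrents',
    'category': 0,
    'filename': 'example show s01e01',  # hypothetical search string
    'limit': 50,
    'offset': 0,
}
# The provider marks the request as AJAX, matching what the site expects.
headers = {'X-Requested-With': 'XMLHttpRequest'}

response = requests.get(SEARCH_URL, params=params, headers=headers)
print(response.status_code, len(response.text))  # parse() consumes response.text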
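The header-label lookup in parse() (indexing row cells by the title attribute of the thead spans instead of by hard-coded column positions) can be tried in isolation. The sketch below uses made-up HTML and plain BeautifulSoup instead of Medusa's BS4Parser wrapper, so the markup and values are illustrative only.

from bs4 import BeautifulSoup

SAMPLE_HTML = """
<table>
  <thead><tr>
    <th><span title="Filename"></span></th>
    <th><span title="Torrent"></span></th>
    <th><span title="Seeders"></span></th>
  </tr></thead>
  <tbody><tr>
    <td><span>Example.Show.S01E01.720p</span></td>
    <td><a href="/dl/12345.torrent">download</a></td>
    <td>42</td>
  </tr></tbody>
</table>
"""

soup = BeautifulSoup(SAMPLE_HTML, 'html.parser')

# Build the label list from the header spans, the same way parse() does.
labels = [span.get('title') for span in soup.find('thead').find_all('span')]

row = soup.find('tbody').find('tr')
cells = row.find_all('td')

# Look cells up by label rather than fixed position, so a reordered column
# on the site does not silently break the scraper.
title = cells[labels.index('Filename')].span.get_text()
download_url = cells[labels.index('Torrent')].a.get('href')
seeders = int(cells[labels.index('Seeders')].get_text())

print(title, download_url, seeders)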