Skip to content

Commit

Permalink
Feature/update abnormal (#9721)
Browse files Browse the repository at this point in the history
* Fix provider abnormal

* providers: Fix saving password

* unused import

* update changelog
  • Loading branch information
p0psicles authored Jul 9, 2021
1 parent 8bc81b0 commit 4c8f153
Show file tree
Hide file tree
Showing 3 changed files with 31 additions and 28 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@

#### Fixes
- Fix prowl notifications ([9720](https://github.com/pymedusa/Medusa/pull/9720))
- Fix provider ABNormal ([9721](https://github.com/pymedusa/Medusa/pull/9721))
- Fix saving provider password ([9721](https://github.com/pymedusa/Medusa/pull/9721))

-----

Expand Down
51 changes: 23 additions & 28 deletions medusa/providers/torrent/html/abnormal.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,7 @@

from medusa import tv
from medusa.bs4_parser import BS4Parser
from medusa.helper.common import (
convert_size,
try_int,
)
from medusa.helper.common import convert_size
from medusa.logger.adapters.style import BraceAdapter
from medusa.providers.torrent.torrent_provider import TorrentProvider

Expand All @@ -35,10 +32,10 @@ def __init__(self):
self.password = None

# URLs
self.url = 'https://abnormal.ws'
self.url = 'https://abn.lol'
self.urls = {
'login': urljoin(self.url, 'login.php'),
'search': urljoin(self.url, 'torrents.php'),
'login': urljoin(self.url, 'Home/Login'),
'search': urljoin(self.url, 'Torrent'),
}

# Proper Strings
Expand All @@ -64,18 +61,9 @@ def search(self, search_strings, age=0, ep_obj=None, **kwargs):

# Search Params
search_params = {
'cat[]': [
'TV|SD|VOSTFR',
'TV|HD|VOSTFR',
'TV|SD|VF',
'TV|HD|VF',
'TV|PACK|FR',
'TV|PACK|VOSTFR',
'TV|EMISSIONS',
'ANIME',
],
'order': 'Time', # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size
'way': 'DESC', # Both ASC and DESC are available for sort direction
'SelectedCats': '1', # "Series" category
'SortOn': 'Created', # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size
'SortOrder': 'DESC', # Both ASC and DESC are available for sort direction
}

for mode in search_strings:
Expand Down Expand Up @@ -113,7 +101,7 @@ def parse(self, data, mode):
items = []

with BS4Parser(data, 'html5lib') as html:
torrent_table = html.find(class_='torrent_table')
torrent_table = html.find(class_='table-rows')
torrent_rows = torrent_table('tr') if torrent_table else []

# Continue only if at least one release is found
Expand All @@ -122,7 +110,7 @@ def parse(self, data, mode):
return items

# Catégorie, Release, Date, DL, Size, C, S, L
labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')]
labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')]

# Skip column headers
for row in torrent_rows[1:]:
Expand All @@ -132,13 +120,13 @@ def parse(self, data, mode):

try:
title = cells[labels.index('Release')].get_text(strip=True)
download = cells[labels.index('DL')].find('a', class_='tooltip')['href']
download = cells[labels.index('DL')].find('a')['href']
download_url = urljoin(self.url, download)
if not all([title, download_url]):
continue

seeders = try_int(cells[labels.index('S')].get_text(strip=True))
leechers = try_int(cells[labels.index('L')].get_text(strip=True))
seeders = int(cells[labels.index('S')].get_text(strip=True))
leechers = int(cells[labels.index('L')].get_text(strip=True))

# Filter unseeded torrent
if seeders < self.minseed:
Expand All @@ -150,7 +138,7 @@ def parse(self, data, mode):

size_index = labels.index('Size') if 'Size' in labels else labels.index('Taille')
torrent_size = cells[size_index].get_text()
size = convert_size(torrent_size, units=units) or -1
size = convert_size(torrent_size.replace(',', '.'), units=units) or -1

item = {
'title': title,
Expand All @@ -175,17 +163,24 @@ def login(self):
if any(dict_from_cookiejar(self.session.cookies).values()):
return True

# Retrieve __RequestVerificationToken
login_html = self.session.get(self.urls['login'])
with BS4Parser(login_html.text, 'html5lib') as html:
token = html.find('input', attrs={'name': '__RequestVerificationToken'}).get('value')

login_params = {
'username': self.username,
'password': self.password,
'Username': self.username,
'Password': self.password,
'__RequestVerificationToken': token,
'RememberMe': True
}

response = self.session.post(self.urls['login'], data=login_params)
if not response or not response.text:
log.warning('Unable to connect to provider')
return False

if "Votre nom d'utilisateur ou mot de passe est incorrect." in response.text:
if 'Erreur lors du login.' in response.text:
log.warning('Invalid username or password. Check your settings')
return False

Expand Down
6 changes: 6 additions & 0 deletions medusa/server/api/v2/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,6 +425,12 @@ def _set_common_settings(provider, config):
except (AttributeError, KeyError):
provider.username = None

if hasattr(provider, 'password'):
try:
provider.password = config['password']
except (AttributeError, KeyError):
provider.password = None

if hasattr(provider, 'api_key'):
try:
provider.api_key = config['apikey']
Expand Down

0 comments on commit 4c8f153

Please sign in to comment.