Fix Speedcd parsing and add pubdate (#5190)
* Fix Speedcd parsing and add pubdate

* Add missing RERIP

* Update CHANGELOG.md
medariox authored and p0psicles committed Sep 11, 2018
1 parent 272e9b4 commit dd8ba3b
Showing 2 changed files with 11 additions and 9 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -7,9 +7,11 @@
- Updated the API v2 endpoint to handle concurrent requests ([#4970](https://github.com/pymedusa/Medusa/pull/4970))
- Converted some of the show header to Vue ([#5087](https://github.com/pymedusa/Medusa/pull/5087))
- Converted "Add Show" options into a Vue SFC ([#4848](https://github.com/pymedusa/Medusa/pull/4848))
+ - Added publishing date to Speed.CD provider ([#5190](https://github.com/pymedusa/Medusa/pull/5190))

#### Fixes
- Fixed many release name parsing issues as a result of updating `guessit` ([#4244](https://github.com/pymedusa/Medusa/pull/4244))
+ - Fixed Speed.CD provider exception during searches ([#5190](https://github.com/pymedusa/Medusa/pull/5190))

-----

18 changes: 9 additions & 9 deletions medusa/providers/torrent/html/speedcd.py
@@ -42,7 +42,7 @@ def __init__(self):
}

# Proper Strings
- self.proper_strings = ['PROPER', 'REPACK', 'REAL']
+ self.proper_strings = ['PROPER', 'REPACK', 'REAL', 'RERIP']

# Miscellaneous Options
self.freeleech = False
@@ -109,9 +109,6 @@ def parse(self, data, mode):
:return: A list of items found
"""
- # Units
- units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']

items = []

with BS4Parser(data, 'html5lib') as html:
@@ -129,14 +126,14 @@
cells = row('td')

try:
- title = cells[1].find('a', class_='torrent').get_text()
+ title = cells[1].find('a').get_text()
download_url = urljoin(self.url,
cells[2].find(title='Download').parent['href'])
if not all([title, download_url]):
continue

- seeders = try_int(cells[5].get_text(strip=True))
- leechers = try_int(cells[6].get_text(strip=True))
+ seeders = try_int(cells[6].get_text(strip=True))
+ leechers = try_int(cells[7].get_text(strip=True))

# Filter unseeded torrent
if seeders < min(self.minseed, 1):
@@ -148,15 +145,18 @@

torrent_size = cells[4].get_text()
torrent_size = torrent_size[:-2] + ' ' + torrent_size[-2:]
- size = convert_size(torrent_size, units=units) or -1
+ size = convert_size(torrent_size) or -1

+ pubdate_raw = cells[1].find('span', class_='elapsedDate').get_text()
+ pubdate = self.parse_pubdate(pubdate_raw, human_time=True)

item = {
'title': title,
'link': download_url,
'size': size,
'seeders': seeders,
'leechers': leechers,
- 'pubdate': None,
+ 'pubdate': pubdate,
}
if mode != 'RSS':
log.debug('Found result: {0} with {1} seeders and {2} leechers',
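For readers skimming the diff above, here is a small, self-contained sketch of what the updated column handling amounts to. The sample HTML and the to_int helper are hypothetical stand-ins (the real Speed.CD markup and Medusa's try_int differ); only the selectors and cell indices mirror the change.

from bs4 import BeautifulSoup

# Hypothetical, simplified Speed.CD results row -- the real markup differs.
SAMPLE_ROW = """
<table><tr>
  <td>Category</td>
  <td><a href="/t/1">Show.S01E01.720p.HDTV.x264-GRP</a>
      <span class="elapsedDate">11 mins ago</span></td>
  <td><a title="Download" href="/download/1">dl</a></td>
  <td>42</td>
  <td>452.9MB</td>
  <td>3</td>
  <td>56</td>
  <td>7</td>
</tr></table>
"""

def to_int(value, default=0):
    """Loose int conversion, roughly what Medusa's try_int does."""
    try:
        return int(value)
    except (TypeError, ValueError):
        return default

row = BeautifulSoup(SAMPLE_ROW, 'html.parser').find('tr')
cells = row('td')  # same shortcut as row.find_all('td')

title = cells[1].find('a').get_text()             # no class_='torrent' filter anymore
seeders = to_int(cells[6].get_text(strip=True))   # moved from cells[5]
leechers = to_int(cells[7].get_text(strip=True))  # moved from cells[6]
pubdate_raw = cells[1].find('span', class_='elapsedDate').get_text()

print(title, seeders, leechers, pubdate_raw)
# Show.S01E01.720p.HDTV.x264-GRP 56 7 11 mins ago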

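The size cell on the site has no space between the number and the unit (for example "452.9MB"), which is why the provider splices one in before calling convert_size; the units= argument is dropped, presumably because convert_size's defaults already cover B through PB. A rough, hypothetical stand-in for that conversion:

def convert_size_sketch(size_string, default=None):
    """Very rough stand-in for Medusa's convert_size helper."""
    units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']  # the defaults the commit now relies on
    try:
        value, unit = size_string.split()
        return int(float(value) * 1024 ** units.index(unit.upper()))
    except (ValueError, IndexError):
        return default

torrent_size = '452.9MB'
torrent_size = torrent_size[:-2] + ' ' + torrent_size[-2:]  # '452.9MB' -> '452.9 MB'
size = convert_size_sketch(torrent_size) or -1
print(size)  # roughly 452.9 * 1024 ** 2 bytes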

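The new pubdate comes from the "elapsedDate" span, which carries a human-readable age such as "11 mins ago"; parse_pubdate(pubdate_raw, human_time=True) turns that into an actual timestamp. A minimal sketch of the idea (the regex and unit table are illustrative, not Medusa's implementation, which likely leans on a dedicated library):

import re
from datetime import datetime, timedelta

_UNITS = {
    'sec': 'seconds', 'min': 'minutes', 'hour': 'hours',
    'day': 'days', 'week': 'weeks',
}

def parse_human_time(raw):
    """Turn strings like '11 mins ago' or '2 hours ago' into a datetime."""
    match = re.match(r'(\d+)\s+(sec|min|hour|day|week)s?\s+ago', raw.strip())
    if not match:
        return None
    amount, unit = int(match.group(1)), _UNITS[match.group(2)]
    return datetime.now() - timedelta(**{unit: amount})

print(parse_human_time('11 mins ago'))  # datetime about 11 minutes in the past
print(parse_human_time('2 hours ago'))  # datetime about 2 hours in the past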