Skip to content

Commit

Permalink
Migrating to tpb api:
Browse files Browse the repository at this point in the history
- migrating tpb module to use the api instead of scraping from the
frontend
- fixing user status bug in utils
  • Loading branch information
philhabell authored and philhabell committed Nov 17, 2020
1 parent a719999 commit 6c71fe2
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 31 deletions.
3 changes: 1 addition & 2 deletions we_get/core/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,13 +142,12 @@ def msg_item(
seeds = items['seeds']
target = items['target']
user_status = items.get('user_status', None)
user_status_text = ''
if user_status and user_status is not None:
if user_status == 'vip':
user_status_text = color(cset['user_status_vip'], user_status)
else:
color(cset['user_status'], user_status)
else:
user_status_text = ''

text = (
"%s %s [%s/%s] %s" % (
Expand Down
58 changes: 29 additions & 29 deletions we_get/modules/the_pirate_bay.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,19 @@

import json
import re
import urllib.parse  # `import urllib` alone does not load the `parse` submodule; also binds `urllib`

from we_get.core.module import Module


# apibay.org JSON API endpoints (replaces the old HTML-scraping frontend URLs,
# which are no longer referenced by this module).
API_URL = "https://apibay.org"
API_SEARCH_LOC = "/q.php?q="
# NOTE(review): "ALI" looks like a typo for "API"; name kept as-is because list() references it.
ALI_LIST_LOC = "/precompiled/data_top100_all.json"
# Category whitelist appended to search queries; presumably excludes the adult
# category range — TODO confirm against the TPB category list.
API_SFW_FILTER = "&cat=100,200,300,400,600"
# URL-encoded tracker list appended to every generated magnet link.
API_TRACKERS = "&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969%2Fannounce&tr=udp%3A%2F%2F9.rarbg.me%3A2850%2Fannounce&tr=udp%3A%2F%2F9.rarbg.to%3A2920%2Fannounce&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969%2Fannounce"

class the_pirate_bay(object):
    # Single docstring kept; the diff view showed the old and new docstrings interleaved.
    """the_pirate_bay module for we-get."""

def __init__(self, pargs):
self.links = None
def parse_pargs(self):
    """Parse module arguments (--search <query>, --list, --sfw) into
    self.action / self.search_query / self.filter.
    """
    for opt in self.pargs:
        if opt == "--search":
            self.action = "search"
            # apibay expects dashes instead of spaces in the query string.
            self.search_query = self.pargs[opt][0].replace(" ", "-")
        elif opt == "--list":
            self.action = "list"
        # Deliberately not `elif`: --sfw combines with --search/--list.
        if opt == "--sfw":
            self.filter = API_SFW_FILTER

def _parse_data(self, data):
soup = BeautifulSoup(data, 'html.parser')
items = soup.find_all("tr")
seeds = None
leeches = None
magnet = None
def generate_magnet(self, data):
    """Build a magnet URI from one apibay result row.

    data: dict with at least "info_hash" and "name" keys (apibay JSON row).
    Returns the magnet link with the module-level tracker list appended.
    Requires `import urllib.parse` at module level (plain `import urllib`
    does not load the `parse` submodule).
    """
    # Percent-encode the display name so spaces/special chars survive in the URI.
    encoded_name = urllib.parse.quote(data["name"])
    return f"magnet:?xt=urn:btih:{data['info_hash']}&dn={encoded_name}{API_TRACKERS}"

for item in items:
cols = item.find_all("td")
if len(cols) > 1:
name = cols[1].a.contents[0]
magnet = cols[3].a['href']
seeds = cols[5].contents[0]
leeches = cols[6].contents[0]
user_status = None
self.items.update({
name: {
'seeds': seeds, 'leeches': leeches,
'link': magnet, 'user_status': user_status}
})
def _parse_data(self, data):
for row in json.loads(data):
print(row)
self.items.update(
{
row["name"]: {
"seeds": row["seeders"],
"leeches": row["leechers"],
"link": self.generate_magnet(row),
"user_status": row["status"],
}
}
)

def search(self):
    """Query the apibay search API and return the parsed items dict.

    Uses self.search_query (set by parse_pargs) and self.filter
    (category filter, possibly empty). Kept only the API URL; the diff
    view had the old frontend URL line interleaved with it.
    """
    url = f"{API_URL}{API_SEARCH_LOC}{self.search_query}{self.filter}"
    data = self.module.http_get_request(url)
    self._parse_data(data)
    return self.items

def list(self):
    """Fetch the apibay top-100 precompiled list and return the parsed items.

    NOTE: shadows the builtin `list` inside this class namespace; name kept
    for interface compatibility. Kept only the API URL; the diff view had
    the old frontend URL line interleaved with it.
    """
    url = f"{API_URL}{ALI_LIST_LOC}"
    data = self.module.http_get_request(url)
    self._parse_data(data)
    return self.items
Expand Down

0 comments on commit 6c71fe2

Please sign in to comment.