Skip to content
This repository has been archived by the owner on Mar 12, 2022. It is now read-only.

Commit

Permalink
fix: could not claim league loot
Browse files Browse the repository at this point in the history
refactor: error catching
refactor: flake8 and pylint suggestions
feat: parameter to show debug messages
docs: added disclaimer
  • Loading branch information
sibalzer committed Oct 4, 2021
1 parent 495f278 commit 4948fc5
Show file tree
Hide file tree
Showing 2 changed files with 67 additions and 51 deletions.
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,8 @@ The script offers multiple arguments:
- -c | --cookie: Path to cookies.txt file
- -p | --publishers: Path to publishers.txt file
- -l | --loop: loops the script with a cooldown of 24h
- --dump: print the http-index page (used for issues/website changes)
- -d | --debug: Print debug messages (used for issues)
- -nh | --no-headless: starts the script with a visible browser (mainly for debugging)

If you use docker simply start the container.
Expand All @@ -49,4 +51,10 @@ If you want to use cron.d instead of letting the script wait 24h you must create

`0 0 * * * root : Primelooter ; /usr/bin/python3 /path/to/primelooter.py --cookie /path/to/cookie.txt --publishers /path/to/publishers.txt`


<a href='https://ko-fi.com/balzer' target='_blank'><img height='35' style='border:0px;height:46px;' src='https://az743702.vo.msecnd.net/cdn/kofi3.png?v=0' border='0' alt='Buy Me a Coffee at ko-fi.com' /></a>


## Disclaimer

Use this bot at your own risk! For more information read the [license](LICENSE.md) file.
110 changes: 59 additions & 51 deletions primelooter.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,10 @@
import sys
import time
import traceback
import typing
import json
import typing

from playwright.sync_api import sync_playwright, Cookie, Browser, Page, BrowserContext, ElementHandle

from playwright.sync_api import sync_playwright, Browser, BrowserContext, Cookie, Error, Page

logging.basicConfig(
level=logging.INFO,
Expand Down Expand Up @@ -75,28 +73,27 @@ def check_eligibility(offer: dict) -> bool:
if offer['linkedJourney']:
for suboffer in offer['linkedJourney']['offers']:
if suboffer['self']['eligibility']:
return suboffer['self']['eligibility']['canClaim']
if suboffer['self']['eligibility']['canClaim']:
return True
return False
elif offer['self']:
if offer['self']:
return offer['self']['eligibility']['canClaim']
else:
raise Exception(
f'Could not check offer eligibility status\n{json.dumps(offer, indent=4)}')
raise Exception(
f'Could not check offer eligibility status\n{json.dumps(offer, indent=4)}')

def claim_external(self, url, publisher):
tab = self.context.new_page()

with tab.expect_response(lambda response: 'https://gaming.amazon.com/graphql' in response.url and 'journey' in response.json()['data']) as response_info:
log.debug('get game title')
tab.goto(url)
game_name = response_info.value.json(
)['data']['journey']['assets']['title']

log.debug("Try to claim %s from %s", game_name, publisher)
tab.wait_for_selector(
'div[data-a-target=loot-card-available]')

try:
with tab.expect_response(lambda response: 'https://gaming.amazon.com/graphql' in response.url and 'journey' in response.json()['data']) as response_info:
log.debug('get game title')
tab.goto(url)
game_name = response_info.value.json(
)['data']['journey']['assets']['title']

log.debug("Try to claim %s from %s", game_name, publisher)
tab.wait_for_selector(
'div[data-a-target=loot-card-available]')

loot_cards = tab.query_selector_all(
'div[data-a-target=loot-card-available]')

Expand Down Expand Up @@ -132,37 +129,43 @@ def claim_external(self, url, publisher):
if tab.query_selector('button[data-a-target=close-modal-button]'):
tab.query_selector(
'button[data-a-target=close-modal-button]').click()
except Exception as ex:
except Error as ex:
print(ex)
traceback.print_tb(ex.__traceback__)
log.error(
f"An error occured ({publisher}/{game_name})! Did they make some changes to the website? Please report @github if this happens multiple times.")
tab.close()
"An error occured (%s/%s)! Did they make some changes to the website? Please report @github if this happens multiple times.", publisher, game_name)
finally:
tab.close()

def claim_direct(self):
tab = self.context.new_page()
tab.goto('https://gaming.amazon.com/home')

FGWP_XPATH = 'xpath=//button[@data-a-target="FGWPOffer"]/ancestor::div[@data-test-selector="Offer"]'
try:
tab.goto('https://gaming.amazon.com/home')

elements = self.page.query_selector_all(FGWP_XPATH)
fgwp_xpath = 'xpath=//button[@data-a-target="FGWPOffer"]/ancestor::div[@data-test-selector="Offer"]'

if len(elements) == 0:
log.error(
"No direct offers found! Did they make some changes to the website? Please report @github if this happens multiple times.")
elements = self.page.query_selector_all(fgwp_xpath)

for elem in elements:
elem.scroll_into_view_if_needed()
self.page.wait_for_load_state('networkidle')
if len(elements) == 0:
log.error(
"No direct offers found! Did they make some changes to the website? Please report @github if this happens multiple times.")

publisher = elem.query_selector(
'p.tw-c-text-alt-2').text_content()
game_name = elem.query_selector('h3').text_content()
for elem in elements:
elem.scroll_into_view_if_needed()
self.page.wait_for_load_state('networkidle')

log.debug("Try to claim %s by %s", game_name, publisher)
elem.query_selector("button[data-a-target=FGWPOffer]").click()
log.info("Claimed %s by %s", game_name, publisher)
publisher = elem.query_selector(
'p.tw-c-text-alt-2').text_content()
game_name = elem.query_selector('h3').text_content()

tab.close()
log.debug("Try to claim %s by %s", game_name, publisher)
elem.query_selector("button[data-a-target=FGWPOffer]").click()
log.info("Claimed %s by %s", game_name, publisher)
except Error as ex:
log.error(ex)
traceback.print_tb(ex.__traceback__)
finally:
tab.close()

def run(self, dump: bool = False):
self.auth()
Expand All @@ -172,7 +175,7 @@ def run(self, dump: bool = False):
offers = self.get_offers()

not_claimable_offers = [offer for offer in offers if offer.get(
'linkedJourney') == None and offer.get('self') == None]
'linkedJourney') is None and offer.get('self') is None]
external_offers = [
offer for offer in offers if offer['deliveryMethod'] == 'EXTERNAL_OFFER' and offer not in not_claimable_offers and PrimeLooter.check_eligibility(offer)]
direct_offers = [
Expand All @@ -194,10 +197,10 @@ def run(self, dump: bool = False):
log.info(msg)
self.claim_direct()
else:
log.info("No direct offers to Claim")
log.info("No direct offers to claim")

# filter publishers
if not 'all' in self.publishers:
if 'all' not in self.publishers:
external_offers = [offer for offer in external_offers if offer['content']
['publisher'] in self.publishers]

Expand All @@ -210,14 +213,10 @@ def run(self, dump: bool = False):
log.info(msg)

for offer in external_offers:
try:
if PrimeLooter.check_eligibility(offer):
self.claim_external(
offer['content']['externalURL'], offer['content']['publisher'])
except Exception as ex:
log.error(ex)
self.claim_external(
offer['content']['externalURL'], offer['content']['publisher'])
else:
log.info("No external offers to Claim")
log.info("No external offers to claim")


def read_cookiefile(path: str) -> typing.List[Cookie]:
Expand Down Expand Up @@ -260,13 +259,20 @@ def read_cookiefile(path: str) -> typing.List[Cookie]:
action='store_true',
default=False)

parser.add_argument('-d', '--dump',
parser.add_argument('--dump',
dest='dump',
help='Dump html to output',
required=False,
action='store_true',
default=False)

parser.add_argument('-d', '--debug',
dest='debug',
help='Print Log at debug level',
required=False,
action='store_true',
default=False)

parser.add_argument('-nh', '--no-headless',
dest='headless',
help='Shall the script not use headless mode?',
Expand All @@ -284,6 +290,9 @@ def read_cookiefile(path: str) -> typing.List[Cookie]:
headless = arg['headless']
dump = arg['dump']

if arg['debug']:
log.level = logging.DEBUG

if arg['loop']:
while True:
try:
Expand All @@ -305,4 +314,3 @@ def read_cookiefile(path: str) -> typing.List[Cookie]:
log.error("Error %s", ex)
traceback.print_tb(ex.__traceback__)
raise ex

0 comments on commit 4948fc5

Please sign in to comment.