add sleep time to avoid 2 req per second limit (#14)
keyn4 authored Apr 18, 2023
1 parent 892d726 commit 544467e
Showing 3 changed files with 6 additions and 0 deletions.
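
All three changes are the same one-line fix: a `time.sleep(0.5)` just before a call that issues an HTTP request. Spacing consecutive calls at least 0.5 s apart caps the request rate at 1 / 0.5 = 2 requests per second, the limit named in the commit message.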
2 changes: 2 additions & 0 deletions tap_shopify/streams/base.py
```diff
@@ -2,6 +2,7 @@
 import functools
 import math
 import sys
+import time
 
 import backoff
 import pyactiveresource
@@ -90,6 +91,7 @@ def __init__(self):
         self.results_per_page = Context.get_results_per_page(RESULTS_PER_PAGE)
 
     def get_bookmark(self):
+        time.sleep(0.5)
         bookmark = (singer.get_bookmark(Context.state,
                                         # name is overridden by some substreams
                                         self.name,
```
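Note that `get_bookmark` itself only reads local state via `singer.get_bookmark`, as the hunk shows; the sleep here appears to throttle the sync loop that invokes it rather than the bookmark lookup itself (an inference from this hunk, not stated in the commit).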
2 changes: 2 additions & 0 deletions tap_shopify/streams/inventory_items.py
```diff
@@ -3,6 +3,7 @@
 from singer.utils import strftime,strptime_to_utc
 from tap_shopify.streams.base import (Stream, shopify_error_handling)
 from tap_shopify.context import Context
+import time
 
 LOGGER = singer.get_logger()
 
@@ -14,6 +15,7 @@ class InventoryItems(Stream):
 
     @shopify_error_handling
     def get_inventory_items(self, inventory_items_ids):
+        time.sleep(0.5)
         return self.replication_object.find(
             ids=inventory_items_ids,
             limit=RESULTS_PER_PAGE)
```
2 changes: 2 additions & 0 deletions tap_shopify/streams/metafields.py
```diff
@@ -1,6 +1,7 @@
 import json
 import shopify
 import singer
+import time
 
 from tap_shopify.context import Context
 from tap_shopify.streams.base import (Stream,
@@ -19,6 +20,7 @@ def get_selected_parents():
 def get_metafields(parent_object, since_id):
     # This call results in an HTTP request - the parent object never has a
     # cache of this data so we have to issue that request.
+    time.sleep(0.5)
     return parent_object.metafields(
         limit=Context.get_results_per_page(RESULTS_PER_PAGE),
         since_id=since_id)
```
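
Sleeping unconditionally is the simplest fix, but it also delays calls that are already well spaced. A minimal sketch of an alternative, assuming nothing beyond the standard library: a throttling decorator that sleeps only for the remainder of the interval. The names `rate_limited` and `fetch_page` are hypothetical and not part of this commit.

```python
import functools
import time

def rate_limited(min_interval=0.5):
    """Space calls to the wrapped function at least min_interval seconds
    apart; 0.5 s between calls keeps the rate at or under 2 calls/s."""
    def decorator(func):
        last_call = [float("-inf")]  # time of the previous call (monotonic clock)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            remaining = min_interval - (time.monotonic() - last_call[0])
            if remaining > 0:
                time.sleep(remaining)  # sleep off only the remaining gap
            last_call[0] = time.monotonic()
            return func(*args, **kwargs)
        return wrapper
    return decorator

@rate_limited(0.5)
def fetch_page(since_id):
    # Hypothetical request-issuing call; stands in for the Shopify
    # API calls throttled in this commit.
    print("fetching page since_id=%s" % since_id)

for i in range(3):
    fetch_page(i)  # runs at most 2 calls per second
```

A decorator like this would also centralize the interval in one place instead of repeating the `0.5` constant across three files.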
