-
Notifications
You must be signed in to change notification settings - Fork 97
Implement a means of getting more than 1000 data points #131
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 2 commits
e94e2d4
e9afd5b
3d267e9
c152480
b533482
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -22,13 +22,18 @@ | |
| import json | ||
| import platform | ||
| import pkg_resources | ||
| import re | ||
| from urllib.parse import urlparse | ||
| from urllib.parse import parse_qs | ||
| # import logging | ||
|
|
||
| import requests | ||
|
|
||
| from .errors import RequestError, ThrottlingError | ||
| from .model import Data, Feed, Group | ||
|
|
||
| API_PAGE_LIMIT = 1000 | ||
|
|
||
| # set outgoing version, pulled from setup.py | ||
| version = pkg_resources.require("Adafruit_IO")[0].version | ||
| default_headers = { | ||
|
|
@@ -60,6 +65,9 @@ def __init__(self, username, key, proxies=None, base_url='https://io.adafruit.co | |
| # constructing the path. | ||
| self.base_url = base_url.rstrip('/') | ||
|
|
||
| # Store the last response of a get or post | ||
| self._last_response = None | ||
|
|
||
| @staticmethod | ||
| def to_red(data): | ||
| """Hex color feed to red channel. | ||
|
|
@@ -111,10 +119,12 @@ def _handle_error(response): | |
| def _compose_url(self, path): | ||
| return '{0}/api/{1}/{2}/{3}'.format(self.base_url, 'v2', self.username, path) | ||
|
|
||
| def _get(self, path): | ||
| def _get(self, path, params=None): | ||
| response = requests.get(self._compose_url(path), | ||
| headers=self._headers({'X-AIO-Key': self.key}), | ||
| proxies=self.proxies) | ||
| proxies=self.proxies, | ||
| params=params) | ||
| self._last_response = response | ||
| self._handle_error(response) | ||
| return response.json() | ||
|
|
||
|
|
@@ -124,6 +134,7 @@ def _post(self, path, data): | |
| 'Content-Type': 'application/json'}), | ||
| proxies=self.proxies, | ||
| data=json.dumps(data)) | ||
| self._last_response = response | ||
| self._handle_error(response) | ||
| return response.json() | ||
|
|
||
|
|
@@ -132,6 +143,7 @@ def _delete(self, path): | |
| headers=self._headers({'X-AIO-Key': self.key, | ||
| 'Content-Type': 'application/json'}), | ||
| proxies=self.proxies) | ||
| self._last_response = response | ||
| self._handle_error(response) | ||
|
|
||
| # Data functionality. | ||
|
|
@@ -231,17 +243,52 @@ def receive_previous(self, feed): | |
| path = "feeds/{0}/data/previous".format(feed) | ||
| return Data.from_dict(self._get(path)) | ||
|
|
||
| def data(self, feed, data_id=None): | ||
| def data(self, feed, data_id=None, max_results=API_PAGE_LIMIT): | ||
|
||
| """Retrieve data from a feed. If data_id is not specified then all the data | ||
| for the feed will be returned in an array. | ||
| :param string feed: Name/Key/ID of Adafruit IO feed. | ||
| :param string data_id: ID of the piece of data to retrieve. | ||
| :param int max_results: The maximum number of results to return. To | ||
| return all data, set to None. | ||
| """ | ||
| if data_id is None: | ||
| path = "feeds/{0}/data".format(feed) | ||
| return list(map(Data.from_dict, self._get(path))) | ||
| path = "feeds/{0}/data/{1}".format(feed, data_id) | ||
| return Data.from_dict(self._get(path)) | ||
| if data_id: | ||
| path = "feeds/{0}/data/{1}".format(feed, data_id) | ||
| return Data.from_dict(self._get(path)) | ||
|
|
||
| params = {'limit': max_results} if max_results else None | ||
| data = [] | ||
| path = "feeds/{0}/data".format(feed) | ||
| while True: | ||
|
||
| data.extend(list(map(Data.from_dict, self._get(path, | ||
| params=params)))) | ||
| nlink = self.get_next_link() | ||
| if not nlink: | ||
| break | ||
| # Parse the link for the query parameters | ||
| params = parse_qs(urlparse(nlink).query) | ||
| if max_results: | ||
| if len(data) >= max_results: | ||
| break | ||
| params['limit'] = max_results - len(data) | ||
| return data | ||
|
|
||
| def get_next_link(self): | ||
| """Parse the `next` page URL in the pagination Link header. | ||
|
|
||
| This is necessary because of a bug in the API's implementation of the | ||
| link header. If that bug is fixed, the link would be accessible by | ||
| response.links['next']['url'] and this method would be broken. | ||
|
|
||
| :return: The url for the next page of data | ||
| :rtype: str | ||
| """ | ||
| if not self._last_response: | ||
| return | ||
| link_header = self._last_response.headers['link'] | ||
| res = re.search('rel="next", <(.+?)>', link_header) | ||
| if not res: | ||
| return | ||
| return res.groups()[0] | ||
|
|
||
| def create_data(self, feed, data): | ||
| """Create a new row of data in the specified feed. | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Could you comment what this is?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Currently, the web API returns a maximum of 1000 data points per page. I could have hard-coded this into the function definition for
data, but I prefer to have constants like this defined up front. That way if the API were to change for whatever reason, the Python library would only need to be updated in this one location.