diff --git a/.travis.yml b/.travis.yml index f17c079e7..e066820c8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,8 +1,8 @@ language: python python: - - "3.6" - "3.7" - "3.8" + - "3.9" cache: directories: - $HOME/.cache/pip diff --git a/README.md b/README.md index b8ae1b208..e133d8566 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![Build Status](https://travis-ci.org/planetlabs/planet-client-python.svg?branch=master)](https://travis-ci.org/planetlabs/planet-client-python) -Python client library and CLI for Planet's public APIs. +Python client library for Planet's APIs. The client provides access to the following Planet APIs: * [analytics](https://developers.planet.com/docs/analytics/) @@ -14,7 +14,7 @@ The client provides access to the following Planet APIs: ### Prerequisites -* Python version 3.6+ +* Python version 3.7+ ### Install package @@ -28,49 +28,158 @@ flag is highly recommended for those new to [pip](https://pip.pypa.io). A PEX executable (Windows not supported) and source releases are [here](https://github.com/planetlabs/planet-client-python/releases/latest). -## Documentation +## Authentication -Online documentation: -https://planetlabs.github.io/planet-client-python/index.html +Planet's APIs require an account for use. +[Sign up here](https://www.planet.com/explorer/?signup). -Documentation is also provided for download -[here](https://github.com/planetlabs/planet-client-python/releases/latest). +## Quick Start -## Development +The client modules within the Python library are asynchronous, which greatly +speeds up many interactions with Planet's APIs. Support for asynchronous +development is native to Python 3.6+ via the +[`asyncio` module](https://docs.python.org/3/library/asyncio.html). A great +resource for getting started with asynchronous programming in Python is +https://project-awesome.org/timofurrer/awesome-asyncio. The Writings and Talks +sections are particularly helpful in getting oriented. -To contribute or develop with this library, see -[CONTRIBUTING](https://github.com/planetlabs/planet-client-python/CONTRIBUTING.md) +```python +import asyncio +import os +import planet -## API Key +API_KEY = os.getenv('PL_API_KEY') -The API requires an account for use. [Signup here](https://www.planet.com/explorer/?signup). +image_ids = ['3949357_1454705_2020-12-01_241c'] +order_details = planet.OrderDetails( + 'test_order', + [planet.Product(image_ids, 'analytic', 'psorthotile')] +) -This can be provided via the environment variable `PL_API_KEY` or the flag `-k` or `--api-key`. +async def create_order(order_details): + async with planet.Session(auth=(API_KEY, '')) as ps: + client = planet.OrdersClient(ps) + return await client.create_order(order_details) -Using `planet init` your account credentials (login/password) can be used to obtain the api key. +oid = asyncio.run(create_order(order_details)) +print(oid) +``` +Not into async? No problem. Just wrap the library and async operations together +and call from your synchronous code. 
-# Example CLI Usage +```python +def sync_create_order(order_details): + return asyncio.run(create_order(order_details)) -**Hint:** autocompletion can be enabled in some shells using: -```console - $ eval "$(_PLANET_COMPLETE=source planet)" +oid = sync_create_order(order_details) +print(oid) ``` -Basics and help: - -```console - $ planet --help +When using `asyncio.run` to develop synchronous code with the async library, +keep in mind this excerpt from the +[asyncio.run](https://docs.python.org/3/library/asyncio-task.html#asyncio.run) +documentation: + +"*This function always creates a new event loop and closes it at the end. It +should be used as a main entry point for asyncio programs, and should ideally +only be called once.*" + +Do you have a use case where native synchronous support is essential? If so, +please contribute to +[Determine need for synchronous support](https://github.com/planetlabs/planet-client-python/issues/251) + + +Why async? Because things get *really cool* when you want to work with multiple +orders. Here's an example of submitting two orders, waiting for them to +complete, and downloading them. The orders each clip a set of images to a +specific area of interest (AOI), so they cannot be combined into one order. +(hint: [Planet Explorer](https://www.planet.com/explorer/) was used to define +the AOIs and get the image ids.) + + +```python +import asyncio +import os + +import planet + +API_KEY = os.getenv('PL_API_KEY') + +iowa_aoi = { + "type": "Polygon", + "coordinates": [[ + [-91.198465, 42.893071], + [-91.121931, 42.893071], + [-91.121931, 42.946205], + [-91.198465, 42.946205], + [-91.198465, 42.893071]]] +} + +iowa_images = [ + '20200925_161029_69_2223', + '20200925_161027_48_2223' +] +iowa_order = planet.OrderDetails( + 'iowa_order', + [planet.Product(iowa_images, 'analytic', 'PSScene4Band')], + tools=[planet.Tool('clip', {'aoi': iowa_aoi})] +) + +oregon_aoi = { + "type": "Polygon", + "coordinates": [[ + [-117.558734, 45.229745], + [-117.452447, 45.229745], + [-117.452447, 45.301865], + [-117.558734, 45.301865], + [-117.558734, 45.229745]]] +} + +oregon_images = [ + '20200909_182525_1014', + '20200909_182524_1014' +] +oregon_order = planet.OrderDetails( + 'oregon_order', + [planet.Product(oregon_images, 'analytic', 'PSScene4Band')], + tools=[planet.Tool('clip', {'aoi': oregon_aoi})] +) + + +async def create_and_download(order_detail, client): + oid = await client.create_order(order_detail) + print(oid) + state = await client.poll(oid, verbose=True) + print(state) + filenames = await client.download_order(oid, progress_bar=True) + print(f'downloaded {oid}, {len(filenames)} files downloaded.') + + +async def main(): + async with planet.Session(auth=(API_KEY, '')) as ps: + client = planet.OrdersClient(ps) + await asyncio.gather( + create_and_download(iowa_order, client), + create_and_download(oregon_order, client) + ) + +asyncio.run(main()) ``` +[Example output](example_output.md) -Specific API client usage: -```console - $ planet data -``` -Specific command help: -```console - $ planet data download --help -``` +## Documentation + +Online documentation: +https://planetlabs.github.io/planet-client-python/index.html + +Documentation is also provided for download +[here](https://github.com/planetlabs/planet-client-python/releases/latest). 
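+## Checking an Existing Order
+
+`OrdersClient` also exposes `get_order` for looking up an order that has
+already been submitted. The snippet below is a minimal sketch that reuses the
+`Session` pattern from the Quick Start; the order ID is a placeholder taken
+from the example output above, so substitute an ID returned by `create_order`.
+
+```python
+import asyncio
+import os
+
+import planet
+
+API_KEY = os.getenv('PL_API_KEY')
+
+
+async def get_order_state(order_id):
+    async with planet.Session(auth=(API_KEY, '')) as ps:
+        client = planet.OrdersClient(ps)
+        # get_order returns a planet.Order; state is e.g. 'queued' or 'success'
+        order = await client.get_order(order_id)
+        return order.state
+
+state = asyncio.run(get_order_state('a27c63c9-a076-4db2-a2e3-c1ff35655cbd'))
+print(state)
+```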
+ +## Development + +To contribute or develop with this library, see +[CONTRIBUTING](https://github.com/planetlabs/planet-client-python/CONTRIBUTING.md) diff --git a/example_output.md b/example_output.md new file mode 100644 index 000000000..93257a959 --- /dev/null +++ b/example_output.md @@ -0,0 +1,66 @@ +``` +a27c63c9-a076-4db2-a2e3-c1ff35655cbd +5d5892b5-3ec0-4df7-9852-a27a40baf0c1 +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: queued +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: queued +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: running +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +order a27c63c9-a076-4db2-a2e3-c1ff35655cbd state: success +success +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: running +./20200909_182525_1014_3B_AnalyticMS_clip.tif: 100%|███████████████████████████████████████████████████████████████████████████████████| 35.8k/35.8k [00:03<00:00, 9.97MB/s] +./20200909_182525_1014_metadata.json: 100%|████████████████████████████████████████████████████████████████████████████████████████████| 0.00k/0.00k [00:00<00:00, 4.08MB/s] +./20200909_182525_1014_3B_AnalyticMS_metadata_clip.xml: 100%|██████████████████████████████████████████████████████████████████████████| 0.01k/0.01k [00:00<00:00, 7.12MB/s] +./20200909_182525_1014_3B_AnalyticMS_DN_udm_clip.tif: 100%|████████████████████████████████████████████████████████████████████████████| 0.06k/0.06k [00:00<00:00, 1.81MB/s] +./20200909_182524_1014_3B_AnalyticMS_clip.tif: 100%|███████████████████████████████████████████████████████████████████████████████████| 33.6k/33.6k [00:02<00:00, 12.2MB/s] 
+./20200909_182524_1014_3B_AnalyticMS_DN_udm_clip.tif: 100%|████████████████████████████████████████████████████████████████████████████| 0.08k/0.08k [00:00<00:00, 1.78MB/s] +order 5d5892b5-3ec0-4df7-9852-a27a40baf0c1 state: success +success +./20200909_182524_1014_3B_AnalyticMS_metadata_clip.xml: 100%|██████████████████████████████████████████████████████████████████████████| 0.01k/0.01k [00:00<00:00, 9.77MB/s] +./20200909_182524_1014_metadata.json: 100%|████████████████████████████████████████████████████████████████████████████████████████████| 0.00k/0.00k [00:00<00:00, 3.12MB/s] +./manifest.json: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 0.00k/0.00k [00:00<00:00, 8.29MB/s] +downloaded a27c63c9-a076-4db2-a2e3-c1ff35655cbd, 9 files downloaded. +./20200925_161029_69_2223_3B_AnalyticMS_metadata_clip.xml: 100%|███████████████████████████████████████████████████████████████████████| 0.01k/0.01k [00:00<00:00, 8.93MB/s] +./20200925_161029_69_2223_3B_AnalyticMS_clip.tif: 100%|████████████████████████████████████████████████████████████████████████████████| 21.3k/21.3k [00:01<00:00, 12.0MB/s] +./20200925_161029_69_2223_3B_AnalyticMS_DN_udm_clip.tif: 100%|█████████████████████████████████████████████████████████████████████████| 0.07k/0.07k [00:00<00:00, 1.99MB/s] +./20200925_161029_69_2223_metadata.json: 100%|█████████████████████████████████████████████████████████████████████████████████████████| 0.00k/0.00k [00:00<00:00, 1.78MB/s] +./20200925_161027_48_2223_metadata.json: 100%|█████████████████████████████████████████████████████████████████████████████████████████| 0.00k/0.00k [00:00<00:00, 2.24MB/s] +./20200925_161027_48_2223_3B_AnalyticMS_clip.tif: 100%|████████████████████████████████████████████████████████████████████████████████| 33.9k/33.9k [00:02<00:00, 11.9MB/s] +./20200925_161027_48_2223_3B_AnalyticMS_DN_udm_clip.tif: 100%|█████████████████████████████████████████████████████████████████████████| 0.08k/0.08k [00:00<00:00, 1.85MB/s] +./20200925_161027_48_2223_3B_AnalyticMS_metadata_clip.xml: 100%|███████████████████████████████████████████████████████████████████████| 0.01k/0.01k [00:00<00:00, 11.7MB/s] +./manifest.json: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 0.00k/0.00k [00:00<00:00, 8.52MB/s] +downloaded 5d5892b5-3ec0-4df7-9852-a27a40baf0c1, 9 files downloaded. +``` + diff --git a/planet/__init__.py b/planet/__init__.py index 75c2286a5..69907f552 100644 --- a/planet/__init__.py +++ b/planet/__init__.py @@ -11,3 +11,26 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from .api.http import Session +from .api.models import Order +from .api.orders import OrdersClient +from .api.order_details import ( + OrderDetails, Product, Notifications, Delivery, AmazonS3Delivery, + AzureBlobStorageDelivery, GoogleCloudStorageDelivery, + GoogleEarthEngineDelivery, Tool) +from .api.__version__ import __version__ # NOQA + +__all__ = [ + Session, + OrdersClient, + Order, + OrderDetails, + Product, + Notifications, + Delivery, + AmazonS3Delivery, + AzureBlobStorageDelivery, + GoogleCloudStorageDelivery, + GoogleEarthEngineDelivery, + Tool +] diff --git a/planet/api/__init__.py b/planet/api/__init__.py index f16357e59..75c2286a5 100644 --- a/planet/api/__init__.py +++ b/planet/api/__init__.py @@ -11,10 +11,3 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from .orders import OrdersClient -from .__version__ import __version__ # NOQA - -__all__ = [ - OrdersClient -] diff --git a/planet/api/auth.py b/planet/api/auth.py deleted file mode 100644 index 57e8da7ba..000000000 --- a/planet/api/auth.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2015 Planet Labs, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Handle authentication with Planet API and management of authentication data. -""" - -import json -import os - -from ._fatomic import atomic_open - - -ENV_KEY = 'PL_API_KEY' - -PLANET_AUTH_FILENAME = '.planet.json' - - -class APIKey(): - def __init__(self, value): - self.value = value - - -def find_api_key(): - api_key = os.getenv(ENV_KEY) - if api_key is None: - contents = read_planet_auth() - api_key = contents.get('key', None) - return api_key - - -def read_planet_auth(): - fname = _planet_auth_file() - contents = {} - if os.path.exists(fname): - with open(fname, 'r') as fp: - contents = json.loads(fp.read()) - return contents - - -def write_planet_auth(contents): - fname = _planet_auth_file() - with atomic_open(fname, 'w') as fp: - fp.write(json.dumps(contents)) - - -def _planet_auth_file(): - return os.path.join(os.path.expanduser('~'), PLANET_AUTH_FILENAME) diff --git a/planet/api/http.py b/planet/api/http.py index 3a6dd60c1..214bc5867 100644 --- a/planet/api/http.py +++ b/planet/api/http.py @@ -13,135 +13,183 @@ # the License. """Functionality to perform HTTP requests""" - -# import http.client as http_client +import asyncio +from http import HTTPStatus import logging -import os -import re -import time -from requests import Session -from requests.compat import urlparse +import httpx from . import exceptions, models from . 
__version__ import __version__ +RETRY_COUNT = 5 +RETRY_WAIT_TIME = 1 # seconds LOGGER = logging.getLogger(__name__) -DEFAULT_CHUNK_SIZE = 32 * 1024 -USE_STRICT_SSL = not (os.getenv('DISABLE_STRICT_SSL', '').lower() == 'true') - -RETRY_WAIT_TIME = 1 # seconds +class SessionException(Exception): + '''exceptions thrown by Session''' + pass -# def setup_logging(): -# log_level = LOGGER.getEffectiveLevel() -# urllib3_logger = logging.getLogger( -# 'requests.packages.urllib3') -# urllib3_logger.setLevel(log_level) -# -# # if debug level set then its nice to see the headers of the request -# if log_level == logging.DEBUG: -# http_client.HTTPConnection.set_debuglevel(1) -# else: -# http_client.HTTPConnection.set_debuglevel(0) -# - -def _log_request(req): - LOGGER.info('%s %s %s %s', req.method, req.url, req.params, req.data) +class Session(): + '''Context manager for asynchronous communication with the Planet server. + Authentication for Planet servers is given as ('', ''). -class PlanetSession(): - """Provides communication with the Planet server""" + :param auth: Planet server authentication. + :type auth: httpx.Auth or tuple. + ''' - def __init__(self): - # general session for sync api calls - self._session = RedirectSession() - self._session.headers.update({'User-Agent': self._get_user_agent()}) - self._session.verify = USE_STRICT_SSL + def __init__(self, auth=None): + self._client = httpx.AsyncClient(auth=auth) + self._client.headers.update({'User-Agent': self._get_user_agent()}) + self._client.event_hooks['request'] = [self._log_request] + self._client.event_hooks['response'] = [ + self._log_response, + self._raise_for_status + ] self.retry_wait_time = RETRY_WAIT_TIME + self.retry_count = RETRY_COUNT - @staticmethod - def _get_user_agent(): - return 'planet-client-python/' + __version__ - - def __enter__(self): + async def __aenter__(self): return self - def __exit__(self, *args): - self.close() + async def __aexit__(self, *args): + await self.aclose() - def close(self): - self._session.close() + async def aclose(self): + await self._client.aclose() - def request(self, request, retry_count=5): - '''Submit a request with retry. + async def retry(self, func, *a, **kw): + '''Run an asynchronous request function with retry.''' + # TODO: retry will be provided in httpx v1 [1] with usage [2] + # 1. https://github.com/encode/httpcore/pull/221 + # 2. https://github.com/encode/httpx/blob/ + # 89fb0cbc69ea07b123dd7b36dc1ed9151c5d398f/docs/async.md#explicit-transport-instances # noqa + # TODO: if throttling is necessary, check out [1] once v1 + # 1. 
https://github.com/encode/httpx/issues/984 + retry_count = self.retry_count + wait_time = self.retry_wait_time - :param :py:Class:`planet.api.models.Request` req: request to submit - :param int retry_count: number of retries - :returns: :py:Class:`planet.api.models.Response` - ''' max_retry = retry_count + 1 for i in range(max_retry): try: - resp = self._do_request(request) - return resp + return await func(*a, **kw) except exceptions.TooManyRequests: if i < max_retry: LOGGER.debug(f'Try {i}') - LOGGER.info('Too Many Requests: sleeping and retrying') + LOGGER.info(f'Too Many Requests: sleeping {wait_time}s') # TODO: consider exponential backoff # https://developers.planet.com/docs/data/api-mechanics/ - time.sleep(self.retry_wait_time) - raise Exception('too many throttles, giving up') - - def _do_request(self, request, **kwargs): + await asyncio.sleep(wait_time) + raise SessionException('Too many throttles, giving up.') + + async def request(self, request, stream=False): + '''Submit a request with retry.''' + # TODO: retry will be provided in httpx v1 [1] with usage [2] + # 1. https://github.com/encode/httpcore/pull/221 + # 2. https://github.com/encode/httpx/blob/ + # 89fb0cbc69ea07b123dd7b36dc1ed9151c5d398f/docs/async.md#explicit-transport-instances # noqa + # TODO: if throttling is necessary, check out [1] once v1 + # 1. https://github.com/encode/httpx/issues/984 + return await self.retry(self._request, request, stream=stream) + + async def _request(self, request, stream=False): '''Submit a request - :param :py:Class:`planet.api.models.Request` req: request to submit - :returns: :py:Class:`planet.api.models.Response` + :param request: Request to submit + :type request: planet.api.models.Request + :param stream: Get the body as a stream. Defaults to False. + :type stream: boolean, optional + :returns: response + :rtype: planet.api.models.Response ''' - # TODO: I don't know where kwargs are used, maybe nowhere? - LOGGER.debug('about to submit request') - _log_request(request) - - t = time.time() - http_resp = self._session.request( - request.method, request.url, data=request.data, - headers=request.headers, params=request.params) - LOGGER.debug('request took %.03f', time.time() - t) - - resp = models.Response(request, http_resp) - resp.raise_for_status() - return resp - - -class RedirectSession(Session): - '''This exists to override the existing behavior of requests that will - strip Authorization headers from any redirect requests that resolve to a - new domain. Instead, we'll keep headers if the redirect is a subdomain - and if not, extract the api-key from the header and add it to the url - as a parameter. - ''' - def rebuild_auth(self, prepared_request, response): - existing_auth = prepared_request.headers.get('Authorization', None) - if existing_auth: - orig = response.request.url - redir = prepared_request.url - if not self._is_subdomain_of_tld(orig, redir): - prepared_request.headers.pop('Authorization') - key = re.match(r'api-key (\S+)', existing_auth) - if key: - prepared_request.prepare_url( - prepared_request.url, { - 'api_key': key.group(1) - } - ) + http_resp = await self._client.send(request.http_request, + stream=stream) + return models.Response(request, http_resp) + + def stream(self, request): + '''Submit a request and get the response as a stream context manager. + + :param request: Request to submit + :type request: planet.api.models.Request + :returns: Context manager providing the body as a stream. 
+ :rtype: Stream + ''' + return Stream( + session=self, + request=request + ) @staticmethod - def _is_subdomain_of_tld(url1, url2): - orig_host = urlparse(url1).hostname - re_host = urlparse(url2).hostname - return orig_host.split('.')[-2:] == re_host.split('.')[-2:] + def _get_user_agent(): + return 'planet-client-python/' + __version__ + + @staticmethod + async def _log_request(request): + LOGGER.info(f'{request.method} {request.url} - Sent') + + @staticmethod + async def _log_response(response): + request = response.request + LOGGER.info( + f'{request.method} {request.url} - ' + f'Status {response.status_code}') + + @staticmethod + async def _raise_for_status(response): + # TODO: consider using http_response.reason_phrase + status = response.status_code + + miminum_bad_request_code = HTTPStatus.MOVED_PERMANENTLY + if status < miminum_bad_request_code: + return + + exception = { + HTTPStatus.BAD_REQUEST: exceptions.BadQuery, + HTTPStatus.UNAUTHORIZED: exceptions.InvalidAPIKey, + HTTPStatus.FORBIDDEN: exceptions.NoPermission, + HTTPStatus.NOT_FOUND: exceptions.MissingResource, + HTTPStatus.TOO_MANY_REQUESTS: exceptions.TooManyRequests, + HTTPStatus.INTERNAL_SERVER_ERROR: exceptions.ServerError + }.get(status, None) + + try: + msg = response.text + except httpx.ResponseNotRead: + await response.aread() + msg = response.text + + # differentiate between over quota and rate-limiting + if status == 429 and 'quota' in msg.lower(): + exception = exceptions.OverQuota + + if exception: + raise exception(msg) + + raise exceptions.APIException(f'{status}: {msg}') + + +class Stream(): + '''Context manager for asynchronous response stream from Planet server. + + :param session: Open session to Planet server + :type session: Session + :param request: Request to submit + :type request: planet.api.models.Request + ''' + def __init__(self, session, request): + self.session = session + self.request = request + + async def __aenter__(self): + self.response = await self.session.request( + request=self.request, + stream=True, + ) + return self.response + + async def __aexit__(self, exc_type=None, exc_value=None, traceback=None): + await self.response.aclose() diff --git a/planet/api/models.py b/planet/api/models.py index b6ac65ad0..adc643e08 100644 --- a/planet/api/models.py +++ b/planet/api/models.py @@ -12,16 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. """Manage data for requests and responses.""" - +import copy import datetime +import json import logging -import os - -from ._fatomic import atomic_open -from . import exceptions, utils - +import mimetypes +import random +import re +import string -CHUNK_SIZE = 32 * 1024 +import httpx +from tqdm.asyncio import tqdm LOGGER = logging.getLogger(__name__) @@ -36,52 +37,43 @@ class Request(): :param url: URL of API endpoint :type url: str - :param auth: Planet API authentication - :type auth: :py:Class:'planet.auth.auth - :param params: values to send in the query string, defaults to None + :param params: Values to send in the query string. Defaults to None. :type params: dict, list of tuples, or bytes, optional - :param body_type: Expected response body type, defaults to `Body` - :type body_type: type, optional - :param data: object to send in the body, defaults to None + :param data: Object to send in the body. Defaults to None. :type data: dict, list of tuples, bytes, or file-like object, optional - :param method: HTTP request method, defaults to 'GET' + :param json: JSON to send. 
Defaults to None. + :type json: dict, optional + :param method: HTTP request method. Defaults to 'GET' :type method: str, optional :raises RequestException: When provided `body_type` is not a subclass of :py:class:`planet.api.models.Body` ''' - def __init__(self, url, auth, params=None, body_type=None, data=None, - method='GET'): - self.url = url - self.auth = auth - self.params = params - - self.body_type = body_type or Body - if not issubclass(self.body_type, Body): - raise RequestException( - f'body_type ({self.body_type}) must be a subclass of Body' - ) + def __init__(self, url, params=None, data=None, json=None, method='GET'): + if data or json: + headers = {'Content-Type': 'application/json'} + else: + headers = None - self.data = data - self.method = method + self.http_request = httpx.Request( + method, + url, + params=params, + data=data, + json=json, + headers=headers) @property - def headers(self): - '''Prepare headers for request. + def url(self): + return self.http_request.url - :returns: prepared headers - :rtype: dict + @url.setter + def url(self, url): + '''Set the url. + + :param url: URL of API endpoint + :type url: str ''' - headers = {} - if self.data: - headers['Content-Type'] = 'application/json' - - if self.auth: - headers.update({ - 'Authorization': 'api-key %s' % self.auth.value - }) - else: - raise exceptions.InvalidAPIKey('No API key provided') - return headers + self.http_request.url = httpx.URL(url) class Response(): @@ -95,21 +87,9 @@ class Response(): def __init__(self, request, http_response): self.request = request self.http_response = http_response - self._body = None - - @property - def body(self): - '''The response Body - - :returns: A Body object containing the response - :rtype: :py:Class:`Body` - ''' - if self._body is None: - self._body = self._create_body() - return self._body - def _create_body(self): - return self.request.body_type(self.request, self.http_response) + def __repr__(self): + return f'' @property def status_code(self): @@ -120,54 +100,28 @@ def status_code(self): ''' return self.http_response.status_code - def __repr__(self): - return '' % (self.status_code) - - def raise_for_status(self): - '''Raises :class: `APIException` if one occured.''' - return self._raise_for_status(self.status_code, self.http_response) - - @staticmethod - def _raise_for_status(status, http_response): - LOGGER.debug(f'status code: {status}') - - if status < 300: - return - - exception = { - 400: exceptions.BadQuery, - 401: exceptions.InvalidAPIKey, - 403: exceptions.NoPermission, - 404: exceptions.MissingResource, - 429: exceptions.TooManyRequests, - 500: exceptions.ServerError - }.get(status, None) - - # differentiate between over quota and rate-limiting - res = http_response - if status == 429 and 'quota' in res.text.lower(): - exception = exceptions.OverQuota + @property + def json(self): + '''Response json. - if exception: - raise exception(res.text) + :returns: json + :rtype: dict + ''' + return self.http_response.json - raise exceptions.APIException('%s: %s' % (status, res.text)) + async def aclose(self): + await self.http_response.aclose() -class Body(): - '''A Body is a representation of a resource from the API. +class StreamingBody(): + '''A representation of a streaming resource from the API. 
- :param request: Request that was submitted to the server - :type request: :py:Class:`planet.api.models.Request - :param http_response: Response that was received from the server - :type http_response: :py:Class:`requests.models.Response` + :param response: Response that was received from the server + :type response: :py:Class:`requests.models.Response` ''' - def __init__(self, request, http_response): - self._request = request - self.response = http_response - - self.size = int(self.response.headers.get('content-length', 0)) - self._cancel = False + def __init__(self, response): + self.response = response.http_response + self.url = response.request.url @property def name(self): @@ -180,105 +134,277 @@ def name(self): :returns: name of this resource :rtype: str ''' - return utils.get_filename(self.response) + name = (_get_filename_from_headers(self.response.headers) or + _get_filename_from_url(self.url) or + _get_random_filename( + self.response.headers.get('content-type'))) + return name - def __len__(self): - return self.size + @property + def size(self): + '''The size of the body. - def __iter__(self): - return (c for c in self.response.iter_content(chunk_size=CHUNK_SIZE)) + :returns: size of the body + :rtype: int + ''' + return int(self.response.headers['Content-Length']) + + @property + def num_bytes_downloaded(self): + '''The number of bytes downloaded. + + :returns: number of bytes downloaded + :rtype: int + ''' + return self.response.num_bytes_downloaded def last_modified(self): - '''Read the last-modified header as a datetime, if present.''' + '''Read the last-modified header as a datetime, if present. + + :returns: last-modified header + :rtype: datatime or None + ''' lm = self.response.headers.get('last-modified', None) return datetime.strptime(lm, '%a, %d %b %Y %H:%M:%S GMT') if lm \ else None - def get_raw(self): - '''Get the decoded text content from the response''' - return self.response.content.decode('utf-8') - - def _write(self, fp, callback): - total = 0 - if not callback: - def noop(*a, **kw): - pass - callback = noop - callback(start=self) - for chunk in self: - if self._cancel: - raise exceptions.RequestCancelled() - fp.write(chunk) - size = len(chunk) - total += size - callback(wrote=size, total=total) - # seems some responses don't have a content-length header - if self.size == 0: - self.size = total - callback(finish=self) - - def write(self, file=None, callback=None): - '''Write the contents of the body to the optionally provided file and - providing progress to the optional callback. The callback will be - invoked 3 different ways: - - * First as ``callback(start=self)`` - * For each chunk of data written as - ``callback(wrote=chunk_size_in_bytes, total=all_byte_cnt)`` - * Upon completion as ``callback(finish=self)`` - - :param file: path or file-like object to write to, defaults to the - name of body - :type file: str or file-like object - :param callback: A function handle of the form - ``callback(start, wrote, total, finish, skip)`` that receives write - progress. Defaults to None - :type callback: function, optional + async def aiter_bytes(self): + async for c in self.response.aiter_bytes(): + yield c + + async def write(self, filename, overwrite=True, progress_bar=True): + '''Write the body to a file. + + :param filename: Name to assign to downloaded file. + :type filename: str + :param overwrite: Overwrite any existing files. Defaults to True + :type overwrite: boolean, optional + :param progress_bar: Show progress bar during download. Defaults to + True. 
+ :type progress_bar: boolean, optional ''' - if not file: - file = self.name - if not file: - raise ValueError('no file name provided or discovered in response') - if hasattr(file, 'write'): - self._write(file, callback) - else: - with atomic_open(file, 'wb') as fp: - self._write(fp, callback) - - def write_to_file(self, filename=None, overwrite=True, callback=None): - '''Write the contents of the body to the optionally provided filename. - - providing progress to the optional callback. The callback will be - invoked 3 different ways: - - * First as ``callback(start=self)`` - * For each chunk of data written as - ``callback(wrote=chunk_size_in_bytes, total=all_byte_cnt)`` - * Upon completion as ``callback(finish=self)` - * Upon skip as `callback(skip=self)` - - :param filename: Filename to write to, defaults to body name - :type filename: str, optional - :param overwrite: Specify whether the file at filename - should be overwritten if it exists, defaults to True - :type overwrite: bool, optional - :param callback: A function handle of the form - ``callback(start, wrote, total, finish, skip)`` that receives write - progress. Defaults to None - :type callback: function, optional + class _LOG(): + def __init__(self, total, unit, filename, disable): + self.total = total + self.unit = unit + self.disable = disable + self.previous = 0 + self.filename = filename + + if not self.disable: + LOGGER.debug(f'writing to {self.filename}') + + def update(self, new): + if new-self.previous > self.unit and not self.disable: + # LOGGER.debug(f'{new-self.previous}') + perc = int(100 * new / self.total) + LOGGER.debug(f'{self.filename}: ' + f'wrote {perc}% of {self.total}') + self.previous = new + + unit = 1024*1024 + + mode = 'wb' if overwrite else 'xb' + try: + with open(filename, mode) as fp: + _log = _LOG(self.size, 16*unit, filename, disable=progress_bar) + with tqdm(total=self.size, unit_scale=True, + unit_divisor=unit, unit='B', + desc=filename, disable=not progress_bar) as progress: + previous = self.num_bytes_downloaded + async for chunk in self.aiter_bytes(): + fp.write(chunk) + new = self.num_bytes_downloaded + _log.update(new) + progress.update(new-previous) + previous = new + except FileExistsError: + LOGGER.info(f'File {filename} exists, not overwriting') + + +def _get_filename_from_headers(headers): + """Get a filename from the Content-Disposition header, if available. + + :param headers dict: a ``dict`` of response headers + :returns: a filename (i.e. ``basename``) + :rtype: str or None + """ + cd = headers.get('content-disposition', '') + match = re.search('filename="?([^"]+)"?', cd) + return match.group(1) if match else None + + +def _get_filename_from_url(url): + """Get a filename from a URL. + + :returns: a filename (i.e. ``basename``) + :rtype: str or None + """ + path = url.path + name = path[path.rfind('/')+1:] + return name or None + + +def _get_random_filename(content_type=None): + """Get a pseudo-random, Planet-looking filename. + + :returns: a filename (i.e. ``basename``) + :rtype: str + """ + extension = mimetypes.guess_extension(content_type or '') or '' + characters = string.ascii_letters + '0123456789' + letters = ''.join(random.sample(characters, 8)) + name = 'planet-{}{}'.format(letters, extension) + return name + + +class Paged(): + '''Asynchronous iterator over results in a paged resource from the Planet + server. + + Each returned result is a json dict. 
+ + :param request: Open session connected to server + :type request: planet.api.http.ASession + :param do_request_fcn: Function for submitting a request. Takes as input + a planet.api.models.Request and returns planet.api.models.Response. + :type do_request_fcn: function + :param limit: Limit orders to given limit. Defaults to None + :type limit: int, optional + ''' + LINKS_KEY = 'links' + NEXT_KEY = 'next' + ITEMS_KEY = 'items' + TYPE = None + + def __init__(self, request, do_request_fcn, limit=None): + self.request = request + self._do_request = do_request_fcn + + self._pages = None + self._items = [] + + self.i = 0 + self.limit = limit + + def __aiter__(self): + return self + + async def __anext__(self): + '''Asynchronous next. + + :returns: next item as json + :rtype: dict ''' - if overwrite or not os.path.exists(filename): - self.write(filename, callback=callback) - else: - if callback: - callback(skip=self) + # This was implemented because traversing _get_pages() + # in an async generator was resulting in retrieving all the + # pages, when the goal is to stop retrieval when the limit + # is reached + if self.limit is not None and self.i >= self.limit: + raise StopAsyncIteration + + try: + item = self._items.pop(0) + self.i += 1 + except IndexError: + self._pages = self._pages or self._get_pages() + page = await self._pages.__anext__() + self._items = page[self.ITEMS_KEY] + try: + item = self._items.pop(0) + self.i += 1 + except IndexError: + raise StopAsyncIteration + + return item + + async def _get_pages(self): + request = copy.deepcopy(self.request) + LOGGER.debug('getting first page') + resp = await self._do_request(request) + page = resp.json() + yield page + + next_url = self._next_link(page) + while(next_url): + LOGGER.debug('getting next page') + request.url = next_url + resp = await self._do_request(request) + page = resp.json() + yield page + next_url = self._next_link(page) + + def _next_link(self, page): + try: + next_link = page[self.LINKS_KEY][self.NEXT_KEY] + LOGGER.debug(f'next: {next_link}') + except KeyError: + LOGGER.debug('end of the pages') + next_link = False + return next_link + + +class Order(): + '''Managing description of an order returned from Orders API. + + :param data: Response json describing order + :type data: dict + ''' + LINKS_KEY = '_links' + RESULTS_KEY = 'results' + LOCATION_KEY = 'location' + def __init__(self, data): + self.data = data -class JSON(Body): - '''A Body that contains JSON''' + def __str__(self): + return " " + json.dumps(self.data) @property - def data(self): - '''The response as a JSON dict''' - data = self.response.json() - return data + def results(self): + '''Results for each item in order. + + :return: result for each item in order + :rtype: list of dict + ''' + links = self.data[self.LINKS_KEY] + results = links.get(self.RESULTS_KEY, None) + return results + + @property + def locations(self): + '''Download locations for order results. + + :return: download locations in order + :rtype: list of str + ''' + return list(r[self.LOCATION_KEY] for r in self.results) + + @property + def state(self): + '''State of the order. + + :return: state of order + :rtype: str + ''' + return self.data['state'] + + @property + def id(self): + '''ID of the order. 
+ + :return: id of order + :rtype: str + ''' + return self.data['id'] + + +class Orders(Paged): + '''Asynchronous iterator over Orders from a paged response describing + orders.''' + LINKS_KEY = '_links' + NEXT_KEY = 'next' + ITEMS_KEY = 'orders' + + async def __anext__(self): + return Order(await super().__anext__()) diff --git a/planet/api/order_details.py b/planet/api/order_details.py new file mode 100644 index 000000000..5bcb8d7d7 --- /dev/null +++ b/planet/api/order_details.py @@ -0,0 +1,539 @@ +# Copyright 2020 Planet Labs, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +"""Functionality for preparing order details for use in creating an order""" +from __future__ import annotations # https://stackoverflow.com/a/33533514 +import copy +import json +from typing import List + +from .. import specs + + +class OrderDetailsException(Exception): + """Exceptions thrown by OrderDetails""" + pass + + +class OrderDetails(): + '''Validating and preparing order details for submission. + + **Parameters** + + * **name** - Name of the order. + * **products** - Product(s) from the Data API to order. + * **subscription_id** - *(optional)* Apply this orders against this quota + subscription. Defaults to 0. + * **delivery** - *(optional)* Specify custom delivery handling. Defaults + to None. + * **notifications** - *(optional)* Specify custom notifications handling. + * **order_type** - *(optional)* Accept a partial order or only accept a + full order. Options are 'partial' and 'full'. Defaults to None. + * **tools** - *(optional)* Tools to apply to the products. Order defines + the toolchain order of operatations. Defaults to None. 
+ ''' + def __init__( + self, + name: str, + products: List[Product], + subscription_id: int = 0, + delivery: Delivery = None, + notifications: Notifications = None, + order_type: str = None, + tools: List[Tool] = None + ): + self.name = name + self.products = products + self.subscription_id = subscription_id + self.delivery = delivery + self.notifications = notifications + self.order_type = order_type + self.tools = tools + + if self.order_type is not None: + self.order_type = specs.validate_order_type(order_type) + + @classmethod + def from_dict(cls, details: dict) -> OrderDetails: + '''Create OrderDetails instance from Orders API spec representation.''' + name = details['name'] + products = [Product.from_dict(p) for p in details['products']] + + subscription_id = details.get('subscription_id', None) + + delivery = details.get('delivery', None) + if delivery: + delivery = Delivery.from_dict(delivery) + + notifications = details.get('notifications', None) + if notifications: + notifications = Notifications.from_dict(notifications) + + order_type = details.get('order_type', None) + tools = [Tool.from_dict(t) for t in details.get('tools', [])] + + return cls(name, + products, + subscription_id, + delivery, + notifications, + order_type, + tools) + + def to_dict(self) -> dict: + '''Get Orders API spec representation.''' + details = { + 'name': self.name, + 'products': [p.to_dict() for p in self.products] + } + + if self.subscription_id is not None: + details['subscription_id'] = self.subscription_id + + if self.delivery is not None: + details['delivery'] = self.delivery.to_dict() + + if self.notifications is not None: + details['notifications'] = self.notifications.to_dict() + + if self.order_type is not None: + details['order_type'] = self.order_type + + if self.tools is not None: + details['tools'] = [t.to_dict() for t in self.tools] + + return details + + @property + def json(self) -> str: + '''Get order details as a string representing json.''' + return json.dumps(self.to_dict()) + + +class Product(): + '''Product description for an order detail. + + **Parameters** + + * **item_ids** - Ids of the catalog items to include in the order. + * **product_bundle** - Set of asset types for the catalog items. + * **item_type** - The class of spacecraft and processing characteristics + for the catalog items. + * **fallback_bundle** - *(optional)* In case product_bundle not having + all asset types available, which would result in failed delivery, try + a fallback bundle. Defaults to no fallback. 
+ ''' + def __init__( + self, + item_ids: List[str], + product_bundle: str, + item_type: str, + fallback_bundle: str = None + ): + self.item_ids = item_ids + self.product_bundle = specs.validate_bundle(product_bundle) + + if fallback_bundle is not None: + self.fallback_bundle = specs.validate_bundle(fallback_bundle) + else: + self.fallback_bundle = None + + self.item_type = specs.validate_item_type(item_type, product_bundle) + if fallback_bundle is not None: + specs.validate_item_type(item_type, fallback_bundle) + + @classmethod + def from_dict(cls, details: dict) -> Product: + '''Create Product instance from Orders API spec representation.''' + bundles = details['product_bundle'].split(',') + product_bundle = bundles[0] + try: + fallback_bundle = bundles[1] + except IndexError: + fallback_bundle = None + + return cls(details['item_ids'], + product_bundle, + details['item_type'], + fallback_bundle) + + def to_dict(self) -> dict: + '''Get Orders API spec representation.''' + product_bundle = self.product_bundle + if self.fallback_bundle is not None: + product_bundle = ','.join([product_bundle, self.fallback_bundle]) + product_dict = { + 'item_ids': self.item_ids, + 'item_type': self.item_type, + 'product_bundle': product_bundle + } + return product_dict + + +class Notifications(): + '''Notifications description for an order detail. + + **Parameters** + + * **email** - *(optional)* Enable email notifications for an order. + Defaults to False. + * **webhook_url** - *(optional)* URL for notification when the order is + ready. Defaults to None. + * **webhook_per_order** - *(optional)* Request a single webhook call per + order instead of one call per each delivered item. Defaults to False. + ''' + def __init__( + self, + email: bool = False, + webhook_url: str = None, + webhook_per_order: bool = False + ): + self.email = email + self.webhook_url = webhook_url + self.webhook_per_order = webhook_per_order + + @classmethod + def from_dict(cls, details: dict) -> Notifications: + '''Create Notifications instance from Orders API spec representation. + ''' + return cls(**details) + + def to_dict(self) -> dict: + '''Get Orders API spec representation.''' + details = {} + + if self.email: + details['email'] = self.email + + if self.webhook_url is not None: + details['webhook_url'] = self.webhook_url + + if self.webhook_per_order: + details['webhook_per_order'] = True + + return details + + +class Delivery(): + '''Manages order detail delivery description. + + **Parameters** + + * **archive_type** - *(optional)* Archive order files. Only supports + 'zip'. Defaults to None (do not archive). + * **single_archive** - *(optional)* Archive all bundles together in a + single file. Defaults to False. + * **archive_filename** - *(optional)* The naming convention to use to name + the archive file that is received. Uses the template variables {{name}} + and {{order_id}}. e.g. "{{name}}_{{order_id}}.zip". Defaults to None + (use API naming convention). 
+ ''' + def __init__( + self, + archive_type: str = None, + single_archive: bool = False, + archive_filename: str = None + ): + if archive_type: + self.archive_type = specs.validate_archive_type(archive_type) + else: + self.archive_type = archive_type + + self.single_archive = single_archive + self.archive_filename = archive_filename + + @classmethod + def from_dict(cls, details: dict) -> Delivery: + '''Create Delivery instance from Orders API spec representation.''' + try: + details = copy.deepcopy(details) + cloud_details = details.pop(cls.cloud_key) + cloud_details.update(details) + details = cloud_details + except AttributeError: + # this is just a generic Details class. Nothing fancy to do here. + pass + return cls(**details) + + def to_dict(self) -> dict: + '''Get Orders API spec representation.''' + + details = {} + + if self.archive_type: + details['archive_type'] = self.archive_type + + if self.single_archive: + details['single_archive'] = self.single_archive + + if self.archive_filename: + details['archive_filename'] = self.archive_filename + + return details + + +class AmazonS3Delivery(Delivery): + '''Amazon S3 delivery description for an order detail. + + **Parameters** + + * **aws_access_key_id** - S3 account access key. + * **aws_secret_access_key** - S3 account secret key. + * **bucket** - The name of the bucket that will receive the order output. + * **aws_region** - The region where the bucket lives in AWS. + * **path_prefix** - *(optional)* An optional string that will be prepended + to the files delivered to the bucket. A slash (/) character will be + treated as a "folder". Any other characters will be added as a prefix to + the files. Defaults to None. + * **archive_type** - *(optional)* Archive order files. Only supports + 'zip'. Defaults to None (do not archive). + * **single_archive** - *(optional)* Archive all bundles together in a + single file. Defaults to False. + * **archive_filename** - *(optional)* The naming convention to use to name + the archive file that is received. Uses the template variables {{name}} + and {{order_id}}. e.g. "{{name}}_{{order_id}}.zip". Defaults to None + (use API naming convention). + ''' + cloud_key = 'amazon_s3' + + def __init__( + self, + aws_access_key_id: str, + aws_secret_access_key: str, + bucket: str, + aws_region: str, + path_prefix: str = None, + archive_type: str = False, + single_archive: bool = False, + archive_filename: str = None + ): + self.aws_access_key_id = aws_access_key_id + self.aws_secret_access_key = aws_secret_access_key + self.aws_region = aws_region + self.bucket = bucket + self.path_prefix = path_prefix + + super().__init__(archive_type, single_archive, archive_filename) + + def to_dict(self) -> dict: + '''Get Orders API spec representation.''' + cloud_details = { + 'aws_access_key_id': self.aws_access_key_id, + 'aws_secret_access_key': self.aws_secret_access_key, + 'bucket': self.bucket, + 'aws_region': self.aws_region, + } + + if self.path_prefix: + cloud_details['path_prefix'] = self.path_prefix + + details = super().to_dict() + details[self.cloud_key] = cloud_details + return details + + +class AzureBlobStorageDelivery(Delivery): + '''Azure Blob Storage delivery description for an order detail. + + **Parameters** + + * **account** - Azure account. + * **container** - ABS container name. + * **sas_token** - Shared-Access Signature token. Token should be specified + without a leading '?'. + * **storage_endpoint_suffix** - *(optional)* Deliver order to a + sovereign cloud. 
Defaults to API default, which is "core.windows.net". + * **path_prefix** - *(optional)* An optional string that will be prepended + to the files delivered to the bucket. A slash (/) character will be + treated as a "folder". Any other characters will be added as a prefix to + the files. Defaults to None. + * **archive_type** - *(optional)* Archive order files. Only supports + 'zip'. Defaults to None (do not archive). + * **single_archive** - *(optional)* Archive all bundles together in a + single file. Defaults to False. + * **archive_filename** - *(optional)* The naming convention to use to name + the archive file that is received. Uses the template variables {{name}} + and {{order_id}}. e.g. "{{name}}_{{order_id}}.zip". Defaults to None + (use API naming convention). + ''' + cloud_key = 'azure_blob_storage' + + def __init__( + self, + account: str, + container: str, + sas_token: str, + storage_endpoint_suffix: str = None, + path_prefix: str = None, + archive_type: str = False, + single_archive: bool = False, + archive_filename: str = None + ): + self.account = account + self.container = container + self.sas_token = sas_token + self.storage_endpoint_suffix = storage_endpoint_suffix + self.path_prefix = path_prefix + + super().__init__(archive_type, single_archive, archive_filename) + + def to_dict(self) -> dict: + '''Get Orders API spec representation.''' + cloud_details = { + 'account': self.account, + 'container': self.container, + 'sas_token': self.sas_token, + } + + if self.storage_endpoint_suffix: + cloud_details['storage_endpoint_suffix'] = \ + self.storage_endpoint_suffix + + if self.path_prefix: + cloud_details['path_prefix'] = self.path_prefix + + details = super().to_dict() + details[self.cloud_key] = cloud_details + return details + + +class GoogleCloudStorageDelivery(Delivery): + '''Google Cloud Storage delivery description for an order detail. + + **Parameters** + + * **bucket** - GCS bucket name. + * **credentials** - JSON-string of service account for bucket. + * **path_prefix** - *(optional)* An optional string that will be prepended + to the files delivered to the bucket. A slash (/) character will be + treated as a "folder". Any other characters will be added as a prefix to + the files. Defaults to None. + * **archive_type** - *(optional)* Archive order files. Only supports + 'zip'. Defaults to None (do not archive). + * **single_archive** - *(optional)* Archive all bundles together in a + single file. Defaults to False. + * **archive_filename** - *(optional)* The naming convention to use to name + the archive file that is received. Uses the template variables {{name}} + and {{order_id}}. e.g. "{{name}}_{{order_id}}.zip". Defaults to None + (use API naming convention). 
+ ''' + cloud_key = 'google_cloud_storage' + + def __init__( + self, + bucket: str, + credentials: str, + path_prefix: str = None, + archive_type: str = False, + single_archive: bool = False, + archive_filename: str = None + ): + self.bucket = bucket + self.credentials = credentials + self.path_prefix = path_prefix + super().__init__(archive_type, single_archive, archive_filename) + + def to_dict(self) -> dict: + '''Get Orders API spec representation.''' + cloud_details = { + 'bucket': self.bucket, + 'credentials': self.credentials, + } + + if self.path_prefix: + cloud_details['path_prefix'] = self.path_prefix + + details = super().to_dict() + details[self.cloud_key] = cloud_details + return details + + +class GoogleEarthEngineDelivery(Delivery): + '''Google Earth Engine delivery description for an order detail. + + **Parameters** + + * **project** - GEE project name. + * **collection** - GEE Image Collection name. + * **archive_type** - *(optional)* Archive order files. Only supports + 'zip'. Defaults to None (do not archive). + * **single_archive** - *(optional)* Archive all bundles together in a + single file. Defaults to False. + * **archive_filename** - *(optional)* The naming convention to use to name + the archive file that is received. Uses the template variables {{name}} + and {{order_id}}. e.g. "{{name}}_{{order_id}}.zip". Defaults to None + (use API naming convention). + ''' + cloud_key = 'google_earth_engine' + + def __init__( + self, + project: str, + collection: str, + archive_type: str = False, + single_archive: bool = False, + archive_filename: str = None + ): + self.project = project + self.collection = collection + super().__init__(archive_type, single_archive, archive_filename) + + def to_dict(self) -> dict: + '''Get Orders API spec representation.''' + cloud_details = { + 'project': self.project, + 'collection': self.collection, + } + + details = super().to_dict() + details[self.cloud_key] = cloud_details + return details + + +class ToolException(Exception): + '''Exceptions thrown by Tool''' + pass + + +class Tool(): + '''Tool description for an order detail. + + See [Tools and Toolchains]( + https://developers.planet.com/docs/orders/tools-toolchains/) + for more information on available tools and tool parameters. + + **Parameters** + + * **name** - Tool name. + * **parameters** - Tool parameters. + ''' + def __init__( + self, + name: str, + parameters: dict + ): + self.name = specs.validate_tool(name) + self.parameters = parameters + + @classmethod + def from_dict(cls, details: dict) -> Tool: + '''Create Tool instance from Orders API spec representation.''' + if len(details) != 1: + raise ToolException( + 'Tool description must have only one item, name: parameters') + name, parameters = details.popitem() + return cls(name, parameters) + + def to_dict(self) -> dict: + '''Get Orders API spec representation.''' + return {self.name: self.parameters} diff --git a/planet/api/orders.py b/planet/api/orders.py index 4ef35cd53..7551ae26d 100644 --- a/planet/api/orders.py +++ b/planet/api/orders.py @@ -12,17 +12,15 @@ # License for the specific language governing permissions and limitations under # the License. """Functionality for interacting with the orders api""" - -import copy +import asyncio import json -import itertools import logging import os import time -from .http import PlanetSession -from . import auth, models -from .. import constants, specs +from .. 
import constants +from .models import Order, Orders, Request, StreamingBody +from .order_details import OrderDetails BASE_URL = constants.PLANET_BASE_URL + 'compute/ops/' @@ -30,8 +28,9 @@ ORDERS_PATH = 'orders/v2/' BULK_PATH = 'bulk/orders/v2/' -ORDERS_STATES_COMPLETE = ['success', 'partial', 'cancelled'] -ORDERS_STATES = ['queued', 'running', 'failed'] + ORDERS_STATES_COMPLETE +ORDERS_STATES_COMPLETE = ['success', 'partial', 'cancelled', 'failed'] +ORDERS_STATES_IN_PROGRESS = ['queued', 'running'] +ORDERS_STATES = ORDERS_STATES_IN_PROGRESS + ORDERS_STATES_COMPLETE LOGGER = logging.getLogger(__name__) @@ -42,24 +41,16 @@ class OrdersClientException(Exception): class OrdersClient(): - """High-level access to Planet's orders API. - - Basic Usage:: - - from planet.api.orders_client import OrdersClient - cl = OrdersClient('api_key') - order = cl.get_order('order_id') + """High-level asynchronous access to Planet's orders API. - :param api_key: API key to use. Defaults to environment variable or - stored authentication data. - :type api_key: str, optional + :param session: Open session connected to server + :type session: planet.api.http.Session :param base_url: The base URL to use. Defaults to production orders API base url. :type base_url: int, optional """ - def __init__(self, api_key=None, base_url=BASE_URL): - api_key = api_key or auth.find_api_key() - self.auth = api_key and auth.APIKey(api_key) + def __init__(self, session, base_url=BASE_URL): + self._session = session self._base_url = base_url if not self._base_url.endswith('/'): @@ -83,74 +74,56 @@ def _order_url(self, order_id): def _bulk_url(self): return self._base_url + BULK_PATH - def _request(self, url, method, body_type, data=None, params=None): - return models.Request(url, self.auth, body_type=body_type, - method=method, data=data, params=params) - - def _get(self, url, body_type, params=None): - request = self._request(url, 'GET', body_type) - return self._do_request(request) - - def _put(self, url, body_type): - request = self._request(url, 'PUT', body_type) - return self._do_request(request) - - def _post(self, url, body_type, data): - LOGGER.debug(f'post data: {data}') - request = self._request(url, 'POST', body_type, data) - return self._do_request(request) - - def _do_request(self, request): - with PlanetSession() as sess: - body = sess.request(request).body - return body - - def _get_pages(self, url, get_next_fcn, params=None): - request = self._request(url, 'GET', models.JSON, params=params) - - with PlanetSession() as sess: - LOGGER.debug('getting first page') - body = sess.request(request).body - yield body - - next_url = get_next_fcn(body) - while(next_url): - LOGGER.debug('getting next page') - request.url = next_url - body = sess.request(request).body - yield body - next_url = get_next_fcn(body) - - def create_order(self, order_request): + def _request(self, url, method, data=None, params=None, json=None): + return Request(url, method=method, data=data, params=params, json=json) + + async def _do_request(self, request): + '''Submit a request and get response. + + :param request: request to submit + :type request: planet.api.models.Request + :returns: response + :rtype: planet.api.models.Response + ''' + return await self._session.request(request) + + async def create_order(self, order_details): '''Create an order request. 
- :param order_request: order request details - :type order_request: dict - :return: The ID of the order + :param order_details: order request details + :type order_details: dict or OrderDetails + :returns: The ID of the order :rtype: str ''' - if not isinstance(order_request, OrderDetails): - order_request = OrderDetails(order_request) + if not isinstance(order_details, OrderDetails): + order_details = OrderDetails.from_dict(order_details) + + data = json.dumps(order_details.to_dict()) url = self._orders_url() + req = self._request(url, method='POST', data=data) + resp = await self._do_request(req) - body = self._post(url, models.JSON, order_request.data) - order = Order(body.data) + order = Order(resp.json()) return order.id - def get_order(self, order_id): + async def get_order(self, order_id): '''Get order details by Order ID. :param order_id: The ID of the order :type order_id: str - :returns: :py:Class:`planet.api.models.Order` + :returns: order + :rtype: planet.api.models.Order :raises planet.api.exceptions.APIException: On API error. ''' url = self._order_url(order_id) - body = self._get(url, models.JSON) - return Order(body.data) + req = self._request(url, method='GET') + resp = await self._do_request(req) + + order = Order(resp.json()) + return order - def cancel_order(self, order_id): + async def cancel_order(self, order_id): '''Cancel a queued order. According to the API docs, cancel order should return the cancelled @@ -162,18 +135,17 @@ def cancel_order(self, order_id): :raises planet.api.exceptions.APIException: On API error. ''' url = self._order_url(order_id) - _ = self._put(url, models.Body) + req = self._request(url, method='PUT') + await self._do_request(req) - def cancel_orders(self, order_ids): + async def cancel_orders(self, order_ids=None): '''Cancel queued orders in bulk. - order_ids is required here even if it is an empty string. This is to - avoid accidentally canceeling all orders when only a subset was - desired. - - :param list of str order_ids: The IDs of the orders. If empty, all - orders in a pre-running state will be cancelled. - :returns dict: Results of the bulk cancel request. + :param order_ids: The IDs of the orders. If empty, all orders in a + pre-running state will be cancelled. + :type order_ids: list of str, opt + :returns: results of the bulk cancel request + :rtype: dict :raises planet.api.exceptions.APIException: On API error. ''' url = self._bulk_url() + 'cancel' @@ -181,42 +153,26 @@ def cancel_orders(self, order_ids): if order_ids: cancel_body['order_ids'] = order_ids - # was sending the body as params without json.dumps() - body = self._post(url, models.JSON, json.dumps(cancel_body)) - return body.data + req = self._request(url, method='POST', json=cancel_body) + resp = await self._do_request(req) + return resp.json() - def aggregated_order_stats(self): + async def aggregated_order_stats(self): '''Get aggregated counts of active orders. - :returns dict: aggregated order counts + :returns: Aggregated order counts + :rtype: dict :raises planet.api.exceptions.APIException: On API error. ''' url = self._stats_url() - res = self._get(url, models.JSON) - return res.data + req = self._request(url, method='GET') + resp = await self._do_request(req) + return resp.json() - def download_asset(self, location, filename=None, directory=None, - callback=None, overwrite=True): + async def download_asset(self, location, filename=None, directory=None, + overwrite=True, progress_bar=True): '''Download ordered asset. 
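For illustration, a minimal sketch of the bulk-cancel and stats methods above, under the same `Session`/`OrdersClient` assumptions; the order ids are placeholders:

```python
import asyncio
import os

import planet

API_KEY = os.getenv('PL_API_KEY')


async def main():
    async with planet.Session(auth=(API_KEY, '')) as ps:
        client = planet.OrdersClient(ps)

        # Aggregated counts of active orders, returned as the API's JSON dict.
        stats = await client.aggregated_order_stats()
        print(stats)

        # Cancel two specific queued orders (placeholder ids). Calling
        # cancel_orders() with no ids cancels all orders in a pre-running state.
        result = await client.cancel_orders(['oid1', 'oid2'])
        print(result)

asyncio.run(main())
```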
- If provided, the callback will be invoked 4 different ways: - - * First as ``callback(start=body)`` - * For each chunk of data written as - ``callback(wrote=chunk_size_in_bytes, total=all_byte_cnt)`` - * Upon completion as ``callback(finish=body)`` - * Upon skip as ``callback(skip=body)`` - - simple reporter callback example:: - - def callback(start=None, wrote=None, total=None, - finish=None, skip=None): - if start: print(start) - if wrote: print(wrote) - if total: print(total) - if finish: print(finish) - if skip: print(skip) - :param location: Download location url including download token :type location: str :param filename: Name to assign to downloaded file. Defaults to the @@ -225,106 +181,110 @@ def callback(start=None, wrote=None, total=None, :param directory: Directory to write to. Defaults to current directory. :type directory: str, optional - :param callback: A function handle of the form - ``callback(start, wrote, total, finish, skip)`` that receives write - progress. Defaults to None - :type callback: function, optional :param overwrite: Overwrite any existing files. Defaults to True - :type overwrite: bool + :type overwrite: boolean, optional + :param progress_bar: Show progress bar during download. Defaults to + True. + :type progress_bar: boolean, optional :return: Path to downloaded file. :rtype: str :raises planet.api.exceptions.APIException: On API error. ''' - body = self._get(location, models.Body) - dl_path = os.path.join(directory or '.', filename or body.name) - body.write_to_file(dl_path, overwrite=overwrite, callback=callback) + req = self._request(location, method='GET') + + async with self._session.stream(req) as resp: + body = StreamingBody(resp) + dl_path = os.path.join(directory or '.', filename or body.name) + await body.write(dl_path, + overwrite=overwrite, + progress_bar=progress_bar) return dl_path - def download_order(self, order_id, directory=None, callback=None, - overwrite=True): + async def download_order(self, order_id, directory=None, overwrite=True, + progress_bar=False): '''Download all assets in an order. - Uses `download_asset` to downloads each asset in an order. - The arguments - `directory`, `callback`, and `overwrite` are used as described in - :py:meth:`download_asset` - :param order_id: The ID of the order :type order_id: str :param directory: Directory to write to. Defaults to current directory. :type directory: str, optional - :param callback: A function handle of the form - ``callback(start, wrote, total, finish, skip)`` that receives write - progress. Invoked as described in Defaults to None - :type callback: function, optional :param overwrite: Overwrite any existing files. Defaults to True - :type overwrite: bool, optional + :type overwrite: boolean, optional + :param progress_bar: Show progress bar during download. Defaults to + True. + :type progress_bar: boolean, optional :return: Paths to downloaded files. :rtype: list of str :raises planet.api.exceptions.APIException: On API error. 
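For illustration, `download_order` is a convenience wrapper; the per-asset method can also be driven directly from `Order.locations`. A sketch with a placeholder order id:

```python
import asyncio
import os

import planet

API_KEY = os.getenv('PL_API_KEY')
ORDER_ID = 'b0cb3448-0a74-11eb-92a1-a3d779bb08e0'  # placeholder


async def main():
    async with planet.Session(auth=(API_KEY, '')) as ps:
        client = planet.OrdersClient(ps)
        order = await client.get_order(ORDER_ID)

        # Each entry in order.locations is a tokenized download URL.
        for location in order.locations:
            path = await client.download_asset(
                location, overwrite=False, progress_bar=True)
            print(path)

asyncio.run(main())
```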
''' - order = self.get_order(order_id) + order = await self.get_order(order_id) locations = order.locations LOGGER.info( f'downloading {len(locations)} assets from order {order_id}' ) - filenames = [self.download_asset(location, - directory=directory, - callback=callback, - overwrite=overwrite) + filenames = [await self.download_asset(location, + directory=directory, + overwrite=overwrite, + progress_bar=progress_bar) for location in locations] return filenames - def wait_for_complete(self, order_id, wait=10, callback=None): - '''Poll for order status until order is complete. - - If provided, the callback will be invoked as: - - * ``callback(state=state)`` - - simple reporter callback example:: - - def callback(state): - print(state) + async def poll(self, order_id, state=None, wait=10, verbose=False): + '''Poll for order status until order reaches desired state. :param order_id: The ID of the order :type order_id: str - :param int wait: Time (in seconds) between polls + :param state: State to poll until. If multiple, use list. Defaults to + any completed state. + :type state: str, list of str + :param wait: Time (in seconds) between polls :type wait: int - :param callback: A function handle of the form - ``callback(state)`` that receives poll progress. Defaults to None - :type callback: function, optional + :param verbose: Print current state at each poll. Defaults to False + :type verbose: bool :return: Completed state of the order :rtype: str :raises planet.api.exceptions.APIException: On API error. + :raises OrdersClientException: If state is not supported. + ''' completed = False + + if state: + if state not in ORDERS_STATES: + raise OrdersClientException( + f'{state} should be one of' + f'{ORDERS_STATES}') + states = [state] + else: + states = ORDERS_STATES_COMPLETE + while not completed: t = time.time() - order = self.get_order(order_id) + order = await self.get_order(order_id) state = order.state - callback(state=state) - LOGGER.info(f'order state: {state}') - - completed = state in ORDERS_STATES_COMPLETE + msg = f'order {order_id} state: {state}' + LOGGER.info(msg) + if verbose: + print(msg) + completed = state in states if not completed: sleep_time = max(wait-(time.time()-t), 0) LOGGER.info(f'sleeping {sleep_time}s') - time.sleep(sleep_time) + await asyncio.sleep(sleep_time) return state - def list_orders(self, state=None, limit=None): + async def list_orders(self, state=None, limit=None): '''Get all order requests. :param state: Filter orders to given state. Defaults to None :type state: str, optional - :param state: Limit orders to given limit. Defaults to None - :type state: int, optional + :param limit: Limit orders to given limit. Defaults to None + :type limit: int, optional :return: User :py:Class:`planet.api.models.Order` objects that match the query - :rtype: iterator + :rtype: list :raises planet.api.exceptions.APIException: On API error. 
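For illustration, a sketch of `poll` with an explicit target state and of the list-returning `list_orders`; the order id is a placeholder:

```python
import asyncio
import os

import planet

API_KEY = os.getenv('PL_API_KEY')
ORDER_ID = 'b0cb3448-0a74-11eb-92a1-a3d779bb08e0'  # placeholder


async def main():
    async with planet.Session(auth=(API_KEY, '')) as ps:
        client = planet.OrdersClient(ps)

        # Block until the order reports the 'running' state, checking every 5s.
        state = await client.poll(ORDER_ID, state='running', wait=5)
        print(state)

        # list_orders now returns a plain list, so it can be sliced and reused.
        orders = await client.list_orders(state='success', limit=10)
        for order in orders:
            print(order.id, order.state)

asyncio.run(main())
```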
''' url = self._orders_url() @@ -334,11 +294,13 @@ def list_orders(self, state=None, limit=None): else: params = None - orders = self._get_orders(url, params) + return await self._get_orders(url, params=params, limit=limit) - if limit: - orders = itertools.islice(orders, limit) - return orders + async def _get_orders(self, url, params=None, limit=None): + request = self._request(url, 'GET', params=params) + + orders_paged = Orders(request, self._do_request, limit=limit) + return [o async for o in orders_paged] @staticmethod def _check_state(state): @@ -347,148 +309,3 @@ def _check_state(state): f'Order state (\'{state}\') should be one of: ' f'{ORDERS_STATES}' ) - - def _get_orders(self, url, params=None): - get_next_fcn = Orders.next_link - bodies = self._get_pages(url, get_next_fcn, params=params) - orders = Orders.items_iter(bodies) - return orders - - -class Orders(): - # TODO: the delegation between Orders and OrdersClient could - # likely be improved here - @staticmethod - def next_link(body): - try: - next_link = body.data['_links']['next'] - LOGGER.debug(f'next link: {next_link}') - except KeyError: - next_link = False - return next_link - - @staticmethod - def items_iter(bodies): - def _get_orders(body): - orders = body.data['orders'] - return (Order(o) for o in orders) - - all_orders = itertools.chain.from_iterable( - (_get_orders(body) for body in bodies)) - return all_orders - - -class Order(): - '''Managing description of an order returned from Orders API. - - :param data: Response json describing order - :type data: dict - ''' - LINKS_KEY = '_links' - RESULTS_KEY = 'results' - LOCATION_KEY = 'location' - - def __init__(self, data): - self.data = data - - def __str__(self): - return " " + json.dumps(self.data) - - @property - def results(self): - '''Results for each item in order. - - :return: result for each item in order - :rtype: list of dict - ''' - links = self.data[self.LINKS_KEY] - results = links.get(self.RESULTS_KEY, None) - return results - - @property - def locations(self): - '''Download locations for order results. - - :return: download locations in order - :rtype: list of str - ''' - return list(r[self.LOCATION_KEY] for r in self.results) - - @property - def state(self): - '''State of the order. - - :return: state of order - :rtype: str - ''' - return self.data['state'] - - @property - def id(self): - '''ID of the order. - - :return: id of order - :rtype: str - ''' - return self.data['id'] - - -class OrderDetailsException(Exception): - """Exceptions thrown by OrderDetails""" - pass - - -class OrderDetails(): - '''Validating and preparing an order description for submission. - - :param details: Specification of order to be created. - :type details: dict - :raises OrderDetailsException: When provided `item_type` or - `product_bundle` is not supported. - ''' - BUNDLE_KEY = 'product_bundle' - - def __init__(self, details): - self._data = copy.deepcopy(details) - self._validate_details() - - @property - def data(self): - '''The order details as a string representing json. - - :return: order details json - :rtype: str - ''' - return json.dumps(self._data) - - def _validate_details(self): - '''Try valiently to get details to match schema. - - Checks that details match the schema and, where possible, change - the details to fit the schema (e.g. 
change capitalization') - ''' - products = self._data['products'] - for p in products: - self._validate_bundle(p) - self._validate_item_type(p) - - def _validate_bundle(self, product): - supported = specs.get_product_bundles() - self._substitute_supported(product, self.BUNDLE_KEY, supported) - - def _validate_item_type(self, product): - key = 'item_type' - bundle = product[self.BUNDLE_KEY] - supported = specs.get_item_types(bundle) - self._substitute_supported(product, key, supported) - - @staticmethod - def _substitute_supported(product, key, supported): - try: - matched_type = specs.get_match(product[key], supported) - LOGGER.debug(f'{key}: {matched_type}') - product[key] = matched_type - except(StopIteration): - raise OrderDetailsException( - f'{key} - \'{product[key]}\' not in {supported}' - ) diff --git a/planet/api/utils.py b/planet/api/utils.py deleted file mode 100644 index 78e1f0c8c..000000000 --- a/planet/api/utils.py +++ /dev/null @@ -1,155 +0,0 @@ -# Copyright 2015 Planet Labs, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -'''Helpful and commonly-used functionality''' -import mimetypes -import random -import re -import string - -from requests.compat import urlparse - - -def get_filename(response): - """Derive a filename from the given response. - - >>> import requests - >>> from planet.api import utils - >>> response = requests.Response() - >>> response.headers = { - ... 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', - ... 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', - ... 'accept-ranges': 'bytes', - ... 'content-type': 'image/tiff', - ... 'content-length': '57350256', - ... 'content-disposition': 'attachment; filename="open_california.tif"' - ... } - >>> response.url = 'https://planet.com/path/to/example.tif?foo=f6f1' - >>> print(utils.get_filename(response)) - open_california.tif - >>> del response - >>> response = requests.Response() - >>> response.headers = { - ... 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', - ... 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', - ... 'accept-ranges': 'bytes', - ... 'content-type': 'image/tiff', - ... 'content-length': '57350256' - ... } - >>> response.url = 'https://planet.com/path/to/example.tif?foo=f6f1' - >>> print(utils.get_filename(response)) - example.tif - >>> del response - >>> response = requests.Response() - >>> response.headers = { - ... 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', - ... 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', - ... 'accept-ranges': 'bytes', - ... 'content-type': 'image/tiff', - ... 'content-length': '57350256' - ... } - >>> response.url = 'https://planet.com/path/to/oops/' - >>> print(utils.get_filename(response)) #doctest:+SKIP - planet-bFL6pwki.tif - >>> - - :param response: An HTTP response. - :type response: :py:class:`requests.Response` - :returns: a filename (i.e. 
``basename``) - :rtype: str - """ - name = (get_filename_from_headers(response.headers) or - get_filename_from_url(response.url) or - get_random_filename(response.headers.get('content-type'))) - return name - - -def get_filename_from_headers(headers): - """Get a filename from the Content-Disposition header, if available. - - >>> from planet.api import utils - >>> headers = { - ... 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', - ... 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', - ... 'accept-ranges': 'bytes', - ... 'content-type': 'image/tiff', - ... 'content-length': '57350256', - ... 'content-disposition': 'attachment; filename="open_california.tif"' - ... } - >>> name = utils.get_filename_from_headers(headers) - >>> print(name) - open_california.tif - >>> - >>> headers.pop('content-disposition', None) - 'attachment; filename="open_california.tif"' - >>> name = utils.get_filename_from_headers(headers) - >>> print(name) - None - >>> - - :param headers dict: a ``dict`` of response headers - :returns: a filename (i.e. ``basename``) - :rtype: str or None - """ - cd = headers.get('content-disposition', '') - match = re.search('filename="?([^"]+)"?', cd) - return match.group(1) if match else None - - -def get_filename_from_url(url): - """Get a filename from a URL. - - >>> from planet.api import utils - >>> urls = [ - ... 'https://planet.com/', - ... 'https://planet.com/path/to/', - ... 'https://planet.com/path/to/example.tif', - ... 'https://planet.com/path/to/example.tif?foo=f6f1&bar=baz', - ... 'https://planet.com/path/to/example.tif?foo=f6f1&bar=baz#quux' - ... ] - >>> for url in urls: - ... print('{} -> {}'.format(url, utils.get_filename_from_url(url))) - ... - https://planet.com/ -> None - https://planet.com/path/to/ -> None - https://planet.com/path/to/example.tif -> example.tif - https://planet.com/path/to/example.tif?foo=f6f1&bar=baz -> example.tif - https://planet.com/path/to/example.tif?foo=f6f1&bar=baz#quux -> example.tif - >>> - - :returns: a filename (i.e. ``basename``) - :rtype: str or None - """ - path = urlparse(url).path - name = path[path.rfind('/')+1:] - return name or None - - -def get_random_filename(content_type=None): - """Get a pseudo-random, Planet-looking filename. - - >>> from planet.api import utils - >>> print(utils.get_random_filename()) #doctest:+SKIP - planet-61FPnh7K - >>> print(utils.get_random_filename('image/tiff')) #doctest:+SKIP - planet-V8ELYxy5.tif - >>> - - :returns: a filename (i.e. 
``basename``) - :rtype: str - """ - extension = mimetypes.guess_extension(content_type or '') or '' - characters = string.ascii_letters + '0123456789' - letters = ''.join(random.sample(characters, 8)) - name = 'planet-{}{}'.format(letters, extension) - return name diff --git a/planet/specs.py b/planet/specs.py index 73f1492e9..4cc1e369b 100644 --- a/planet/specs.py +++ b/planet/specs.py @@ -20,18 +20,72 @@ DATA_DIR = 'data' PRODUCT_BUNDLE_SPEC_NAME = 'orders_product_bundle_2020_03_10.json' +SUPPORTED_TOOLS = ['band_math', 'clip', 'composite', 'coregister', + 'file_format', 'reproject', 'tile', 'toar', 'harmonize'] +SUPPORTED_ORDER_TYPES = ['full', 'partial'] +SUPPORTED_ARCHIVE_TYPES = ['zip'] LOGGER = logging.getLogger(__name__) +class SpecificationException(Exception): + '''No match was found''' + pass + + +def validate_bundle(bundle): + supported = get_product_bundles() + return _validate_field(bundle, supported, 'product_bundle') + + +def validate_item_type(item_type, bundle): + bundle = validate_bundle(bundle) + supported = get_item_types(bundle) + return _validate_field(item_type, supported, 'item_type') + + +def validate_order_type(order_type): + return _validate_field(order_type, SUPPORTED_ORDER_TYPES, 'order_type') + + +def validate_archive_type(archive_type): + return _validate_field( + archive_type, SUPPORTED_ARCHIVE_TYPES, 'archive_type') + + +def validate_tool(tool): + return _validate_field(tool, SUPPORTED_TOOLS, 'tool') + + +def _validate_field(value, supported, field_name=None): + try: + value = get_match(value, supported) + except(NoMatchException): + msg = f'\'{value}\' not in {list(supported)}' + if field_name: + msg = f'{field_name} - ' + msg + raise SpecificationException(msg) + return value + + +class NoMatchException(Exception): + '''No match was found''' + pass + + def get_match(test_entry, spec_entries): '''Find and return matching spec entry regardless of capitalization. This is helpful for working with the API spec, where the capitalization is hard to remember but must be exact otherwise the API throws an exception.''' - return next(t for t in spec_entries - if t.lower() == test_entry.lower()) + try: + match = next(t for t in spec_entries + if t.lower() == test_entry.lower()) + except(StopIteration): + raise NoMatchException + + return match def get_product_bundles(): diff --git a/requirements-dev.txt b/requirements-dev.txt index f1f45a104..c7129f646 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,3 +1,4 @@ -requests-mock==1.8.0 +pytest-asyncio==0.14.0 +respx==0.16.3 sphinx-autobuild==2020.9.1 tox==3.20.1 diff --git a/requirements.txt b/requirements.txt index 7d3435553..2400422f3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,2 @@ -requests==2.25.0 +httpx==0.16.1 +tqdm==4.56.0 diff --git a/scripts/order_download.py b/scripts/order_download.py deleted file mode 100644 index c52576f61..000000000 --- a/scripts/order_download.py +++ /dev/null @@ -1,193 +0,0 @@ -# Copyright 2020 Planet Labs, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
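For illustration, the validators added to `planet/specs.py` above normalize capitalization against the API spec and raise `SpecificationException` for unsupported values. A minimal sketch, assuming `planet.specs` imports as laid out in this diff:

```python
from planet import specs

# Matching is case-insensitive; validators return the spelling the API expects.
print(specs.validate_tool('Clip'))          # -> 'clip'
print(specs.validate_order_type('FULL'))    # -> 'full'
print(specs.validate_archive_type('ZIP'))   # -> 'zip'

# Unsupported values raise SpecificationException naming the allowed options.
try:
    specs.validate_tool('sharpen')
except specs.SpecificationException as exc:
    print(exc)  # tool - 'sharpen' not in [...]
```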
-# See the License for the specific language governing permissions and -# limitations under the License. - -''' -Test CLI download. This creates an order, waits for it to be ready, then -downloads it, and confirms all files were downloaded. - -Because download is spotty, this runs download multiple times and ensures that -each time all files were downloaded. -''' -import argparse -import json -import logging -import os -import subprocess -import sys -import tempfile -import time - -# from click.testing import CliRunner -import requests -from requests.auth import HTTPBasicAuth - - -# logging.basicConfig(filename='example.log', level=logging.DEBUG) -# logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) - -logging.basicConfig( - stream=sys.stderr, level=logging.DEBUG, - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' -) - -logger = logging.getLogger(__name__) -# logging.getLogger('planet.api.dispatch').setLevel(logging.WARNING) -# logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING) -# API Key stored as an env variable -PLANET_API_KEY = os.getenv('PL_API_KEY') - -ORDERS_URL = 'https://api.planet.com/compute/ops/orders/v2' - -# equivalent to: -# planet orders create --item-type SkySatScene --bundle analytic \ -# --id 20200505_193841_ssc4d1_0018 --name 20200505_193841_ssc4d1_0018 -ORDER_REQUEST = { - "name": "20200505_193841_ssc4d1_0018", - "products": [ - { - "item_ids": [ - "20200505_193841_ssc4d1_0018" - ], - "item_type": "SkySatScene", - "product_bundle": "analytic" - } - ], - "state": "success", - "delivery": { - "archive_filename": "{{name}}_{{order_id}}.zip", - "archive_type": "zip", - "single_archive": True - }, - "tools": [ - { - "reproject": { - "kernel": "cubic", - "projection": "EPSG:4326" - } - } - ] -} - - -def submit_order(request, auth): - auth = HTTPBasicAuth(PLANET_API_KEY, '') - - # set content type to json - headers = {'content-type': 'application/json'} - - response = requests.post(ORDERS_URL, - data=json.dumps(request), - auth=auth, - headers=headers) - order_id = response.json()['id'] - return order_id - - -def poll_for_success(order_url, auth, num_loops=50): - count = 0 - while(count < num_loops): - count += 1 - r = requests.get(order_url, auth=auth) - response = r.json() - state = response['state'] - logger.info(state) - end_states = ['success', 'failed', 'partial'] - if state in end_states: - break - time.sleep(10) - if state != 'success': - raise Exception('order did not succeed') - - -def test_download_order(order_id, num_runs): - # # these are the files inside the zip - # expected_files = [ - # '20200505_193841_ssc4d1_0018_analytic_reproject.tif', - # '20200505_193841_ssc4d1_0018_analytic_udm_reproject.tif', - # '20200505_193841_ssc4d1_0018_metadata.json', - # 'manifest.json' - # ] - - expected_files = [ - '20200505_193841_ssc4d1_0018_53d1209a-af58-40ce-974f-3570f4e20326.zip', - 'manifest.json' - ] - - messages = [] - for i in range(num_runs): - logging.debug('TEST {}'.format(i)) - files = download_order_cli(order_id) - if not len(files) == len(expected_files): - messages.append('TEST {}'.format(i)) - messages.append('{} != {}'.format(len(files), len(expected_files))) - for f in expected_files: - if f not in files: - messages.append('{} not found'.format(f)) - - if len(messages): - for m in messages: - logger.info(m) - else: - logger.info('Success!') - - -def download_order_cli(order_id): - with tempfile.TemporaryDirectory() as tmpdirname: - cmd = ['planet', '-vv', 'orders', 'download', '--dest', tmpdirname, - order_id] - - 
logging.debug(cmd) - _run_command_line(cmd) - - files = os.listdir(tmpdirname) - logger.debug(files) - return files - - -def _run_command_line(cmds, stdout=None, stderr=None): - try: - cmds = [str(x) for x in cmds] - logging.debug(' '.join(cmds)) - subprocess.check_call(cmds, stdout=stdout, stderr=stderr) - except OSError: - raise OSError('{} not found.'.format(cmds[0])) - - -def get_parser(): - aparser = argparse.ArgumentParser( - description='Submit and download an order') - aparser.add_argument('-o', '--oid', - help='order id') - aparser.add_argument('-r', '--runs', type=int, default=5, - help='number of runs') - return aparser - - -if __name__ == '__main__': - args = get_parser().parse_args(sys.argv[1:]) - logger.debug(args) - - auth = HTTPBasicAuth(PLANET_API_KEY, '') - - if args.oid: - order_id = args.oid - else: - logging.debug('submitting order') - order_id = submit_order(ORDER_REQUEST, auth) - - order_url = ORDERS_URL + '/' + order_id - poll_for_success(order_url, auth) - - test_download_order(order_id, args.runs) - logger.info('order id: {}'.format(order_id)) diff --git a/scripts/orders_api.py b/scripts/orders_api.py deleted file mode 100644 index cbad47389..000000000 --- a/scripts/orders_api.py +++ /dev/null @@ -1,188 +0,0 @@ -# Copyright 2020 Planet Labs, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -''' -Test interactions with the API to get real responses for test mocking -''' -import json -import logging -import os -import sys -import time - -from planet.api import http, models, OrdersClient - -ORDERS_URL = 'https://api.planet.com/compute/ops/orders/v2/' - -API_KEY = os.getenv('PL_API_KEY') - -LOGGER = logging.getLogger(__name__) -logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) - - -class Request(object): - def __init__(self, url, body_type, method, headers, data=None): - self.url = url - self.body_type = body_type - self.method = method - self.headers = headers - - self.params = None - self.data = data - - -def trigger_throttle(): - url = ORDERS_URL - body_type = models.Order - method = 'GET' - headers = {'Authorization': 'api-key %s' % API_KEY} - req = Request(url, body_type, method, headers) - - with http.PlanetSession() as sess: - t = time.time() - for i in range(15): - resp = sess.request(req) - print(time.time() - t) - print(resp) - - - -def trigger_unauth(): - url = ORDERS_URL - body_type = models.Order - method = 'GET' - headers = {'Authorization': 'api-key %s' % 'nope'} - req = Request(url, body_type, method, headers) - - with http.PlanetSession() as sess: - resp = sess.request(req) - print(resp) - - -TEST_ORDER = { - "name": "test_order", - "products": [ - { - "item_ids": [ - "3949357_1454705_2020-12-01_241c", - "3949357_1454805_2020-12-01_241c" - ], - "item_type": "PSOrthoTile", - "product_bundle": "analytic" - } - ] - } - - -def create_order(): - url = ORDERS_URL - body_type = models.Order - method = 'POST' - headers = { - 'Authorization': 'api-key %s' % API_KEY, - 'Content-Type': 'application/json' - } - - data = json.dumps(TEST_ORDER) - req = Request(url, body_type, method, headers, data=data) - - with http.PlanetSession() as sess: - resp = sess.request(req) - print(resp) - print(resp.body.get_raw()) - - -def create_order_client(): - cl = OrdersClient() - oid = cl.create_order(TEST_ORDER) - print(oid) - return oid - - -def cancel_order_client(oid): - cl = OrdersClient() - cancelled = cl.cancel_order(oid) - print(cancelled) - # print(cancelled.response.headers) - # print(cancelled.response.content) - - -def cancel_orders_client(oid): - cl = OrdersClient() - cancelled = cl.cancel_orders([oid]) - print(cancelled) - - -def cancel_orders_client_all(): - cl = OrdersClient() - cancelled = cl.cancel_orders([]) - print(cancelled) - - -def list_orders(): - # for i in range(50): - # create_order_client() - - cl = OrdersClient() - # orders = cl.list_orders() - orders = cl.list_orders(state='success') - # orders = cl.list_orders(state='failed') - for o in orders: - print(o.state) - - -def download_asset(): - cl = OrdersClient() - # # metadata - # location = 
"https://api.planet.com/compute/ops/download/?token=eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2MDc3MjU4MDgsInN1YiI6Ikd0TC9Nb1EvSU1UTzRFTWxrU3Fxc09TaUtpUDZVbldSc1lzdlRlYlY4elNzOXUzSzRlSk9aQ0J4N2ZvY3JKOGxyY1FzVHI2aHVlK2NnbEpZNGdOa1p3PT0iLCJ0b2tlbl90eXBlIjoiZG93bmxvYWQtYXNzZXQtc3RhY2siLCJhb2kiOiIiLCJhc3NldHMiOlt7Iml0ZW1fdHlwZSI6IiIsImFzc2V0X3R5cGUiOiIiLCJpdGVtX2lkIjoiIn1dLCJ1cmwiOiJodHRwczovL3N0b3JhZ2UuZ29vZ2xlYXBpcy5jb20vY29tcHV0ZS1vcmRlcnMtbGl2ZS9mOGRhMGEzZS0xNzRmLTQzNTktYjA4OC1hOTYxYWM3NmYwZTcvUFNPcnRob1RpbGUvMzk0OTM1N18xNDU0NzA1XzIwMjAtMTItMDFfMjQxY19tZXRhZGF0YS5qc29uP0V4cGlyZXM9MTYwNzcyNTgwOFx1MDAyNkdvb2dsZUFjY2Vzc0lkPWNvbXB1dGUtZ2NzLXN2Y2FjYyU0MHBsYW5ldC1jb21wdXRlLXByb2QuaWFtLmdzZXJ2aWNlYWNjb3VudC5jb21cdTAwMjZTaWduYXR1cmU9QnRqQUtqUXY2R1hUWnNjYm5MUU45UWw4JTJCcG5neFBDek1hVkhVeTRkUmR6NU5JNW5YSUZ1NkxWOTZrSzkyVTBzMjF6WkJ1bmVpZ0VVdVRBd3FpaFdsSmdsc29DTGJGMXl3NnIzSllNdDNSSlRvaU11Qm0yVkRZQnVzWVlCZWlSUnMxaVQyeFprTnhmenZPcTQ2QWQyQTB1dzNBeEpzalVJZnNhMW5SeWtEVHE0dVFTOUVpRnhFYjJOYWJFOGk2Z3lySWM5dUVwQkRxSXZVOSUyQk9mRTdXc3hDQ2hFNmlJclJoZ1ZkaUk5SVJlSDNTSk1oQ3Zja2Z1aXVySjRqRWl3VU5MeFNKRExxMmVYMkF3cldFT3hGeDhxSDcxZ0dnMjk0MmtYSkNzeXVaTEw5YUtWNGM1ckEzNkJDMG1QT2dncW9vUzQ1ZzUwODBCa2JOOCUyQmw5eUJsTmxBJTNEJTNEIiwic291cmNlIjoiT3JkZXJzIFNlcnZpY2UifQ.zjcMsv4vntuzSnBCSjPotlGHkV0F6QaL9WgGYYHYosCh6xKEoEgdA90rWPSm90nc5kdP3ch8o9cbory5N4_cdA" # noqa E501 - - # img - - location = "https://api.planet.com/compute/ops/download/?token=eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2MDc5MDMzMzcsInN1YiI6IlJrM1BCNGIrY2VHb1RzQ2VxbGIwKzRFZmRnUWRYaGJiY3p3N1dMR0paczB5cGVOcEVqTVhGdWtqOWg5dVlMdkZMb05ZVGdNdTgrOTJrajdyWHg5VWJnPT0iLCJ0b2tlbl90eXBlIjoiZG93bmxvYWQtYXNzZXQtc3RhY2siLCJhb2kiOiIiLCJhc3NldHMiOlt7Iml0ZW1fdHlwZSI6IiIsImFzc2V0X3R5cGUiOiIiLCJpdGVtX2lkIjoiIn1dLCJ1cmwiOiJodHRwczovL3N0b3JhZ2UuZ29vZ2xlYXBpcy5jb20vY29tcHV0ZS1vcmRlcnMtbGl2ZS84NWMwODkxOC1mNGJmLTRlMjYtYjE2Yy0xMjhlMzJhNjZjMjYvUFNPcnRob1RpbGUvMzk0OTM1N18xNDU0NzA1XzIwMjAtMTItMDFfMjQxY19CR1JOX0FuYWx5dGljLnRpZj9FeHBpcmVzPTE2MDc5MDMzMzdcdTAwMjZHb29nbGVBY2Nlc3NJZD1jb21wdXRlLWdjcy1zdmNhY2MlNDBwbGFuZXQtY29tcHV0ZS1wcm9kLmlhbS5nc2VydmljZWFjY291bnQuY29tXHUwMDI2U2lnbmF0dXJlPW5BVEVIYzR5OUw2RXQ0Y3glMkJkSXU3dHVvOVRQMFBoQ1R1bXFsYW1ZcmJCRHJnMUYxdVJzSiUyQmpXRTkxcFhwR3JINUxnJTJGaVRGcnglMkJlRjZGU0dKcTFua3Rmd2k5YTFabDZnZFlCUUhxSVVNVWFRbnRpY29ibmlZajVtJTJCZWJhbGpuOXlubjBvTHNjOWRDMHV0b1N3TllIMnZOTU9yNVJUaGF6ZjVhaG9ES1JVOE9mV0QlMkZ4dWZXWXdWSHhUTiUyQlhqVlpmeTVXJTJGTW02TWxpMDglMkZjJTJGRlI1JTJGVnJUUFZRZERZMlU3cktEYUJ2SDhBTWVwcHVWT2ZCNXNYdDBnQXMzVzlIYkhNdVdaJTJGbkxEb0EzVlBqZzUxMFVzTUN6VXNRbGNIWWMlMkZnUDZUTVdZWVB3SHFRNlFSeVA4NlpLOGJ2V1F4JTJCYzJYYW9GWHE1OEllazFHJTJCOHFxc0VONVZ1USUzRCUzRCIsInNvdXJjZSI6Ik9yZGVycyBTZXJ2aWNlIn0.16YtuLb4qzv2Q_DNe1068v-qY9xpB_JpwGks9x8IlAb0Sh1lTA_YpibSXEyG4Obys5p1fgTd3aA9H-tawgrW0Q" # noqa E501 - - filename = cl.download_asset(location) - LOGGER.warning(filename) - - -def download_order(): - oid = create_order_client() - - cl = OrdersClient() - cl.download_order(oid) - - -def wait_for_complete(): - oid = create_order_client() - - cl = OrdersClient() - cl.wait_for_complete(oid) - - -def run(): - # trigger_unauth() - # trigger_throttle() - # create_order() - # create_order_client() - # oid = create_order_client(); cancel_order_client(oid) # noqa: E702 - # oid = create_order_client(); cancel_orders_client(oid) # noqa: E702 - # create_order_client(); cancel_orders_client_all() # noqa: E702 - # list_orders() - # LOGGER.warning('API KEY: {}'.format(API_KEY)) - # download_asset() - # download_order() - wait_for_complete() - - -if __name__ == '__main__': - run() diff --git 
a/setup.py b/setup.py index c89543372..775652d10 100644 --- a/setup.py +++ b/setup.py @@ -17,6 +17,7 @@ # Get the long description from the relevant file +# TODO: consider moving to markdown from rst at this point try: with codecs_open('README.rst', encoding='utf-8') as f: long_description = f.read() @@ -35,9 +36,9 @@ test_requires = [ - 'mock', 'pytest', - 'requests-mock', + 'pytest-asyncio', + 'respx' ] dev_requires = [ @@ -47,8 +48,6 @@ 'pytest-cov', 'sphinx', 'wheel', - 'mock', - 'requests-mock', ] setup(name='planet', @@ -67,18 +66,18 @@ 'Topic :: Utilities' ], keywords='planet api client', - author=u"Ian Schneider", - author_email='ischneider@planet.com', + author='Jennifer Reiber Kyle', + author_email='jennifer.kyle@planet.com', url='https://github.com/planetlabs/planet-client-python', license='Apache 2.0', packages=find_packages(exclude=['examples', 'tests']), data_files=[('', ['LICENSE'])], include_package_data=True, zip_safe=False, + python_requires='>=3.7', install_requires=[ - 'click', - 'requests', - 'requests_futures == 0.9.7', + 'httpx>=0.16', + 'tqdm>=4.56', 'pywin32 >= 1.0;platform_system=="Windows"' ], extras_require={ @@ -88,5 +87,5 @@ entry_points=""" [console_scripts] planet=planet.scripts:main - """ + """, ) diff --git a/tests/conftest.py b/tests/conftest.py index 58eae07fd..2658c46bf 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import json import os from pathlib import Path @@ -25,3 +26,31 @@ def open_test_img(): img_path = _test_data_path / 'test_sm.tif' with open(img_path, 'rb') as img: yield img + + +def _get_file_json(filename): + file_path = _test_data_path / filename + return json.load(open(file_path, 'r')) + + +@pytest.fixture +def order_description(): + filename = 'order_description_b0cb3448-0a74-11eb-92a1-a3d779bb08e0.json' + return _get_file_json(filename) + + +@pytest.fixture +def order_details(): + filename = 'order_details_psorthotile_analytic.json' + return _get_file_json(filename) + + +@pytest.fixture +def orders_page(): + filename = 'orders_page.json' + return _get_file_json(filename) + + +@pytest.fixture +def oid(): + return 'b0cb3448-0a74-11eb-92a1-a3d779bb08e0' diff --git a/tests/data/order_description_b0cb3448-0a74-11eb-92a1-a3d779bb08e0.json b/tests/data/order_description_b0cb3448-0a74-11eb-92a1-a3d779bb08e0.json index 7a1650211..a4e55f5b8 100644 --- a/tests/data/order_description_b0cb3448-0a74-11eb-92a1-a3d779bb08e0.json +++ b/tests/data/order_description_b0cb3448-0a74-11eb-92a1-a3d779bb08e0.json @@ -3,7 +3,22 @@ "_self": "string", "results": [ { - "location": "/foo" + "delivery": "success", + "expires_at": "2020-12-04T22:25:30.262Z", + "location": "location1", + "name": "name1" + }, + { + "delivery": "success", + "expires_at": "2020-12-04T22:25:30.264Z", + "location": "location2", + "name": "name2" + }, + { + "delivery": "success", + "expires_at": "2020-12-04T22:25:30.267Z", + "location": "location3", + "name": "name3" } ] }, diff --git a/tests/data/order_details_psorthotile_analytic.json b/tests/data/order_details_psorthotile_analytic.json index 21e09d66d..8ef8c9d41 100644 --- a/tests/data/order_details_psorthotile_analytic.json +++ b/tests/data/order_details_psorthotile_analytic.json @@ -11,17 +11,11 @@ ], "delivery": { "single_archive": true, - "archive_type": "string", - "archive_filename": "string", - "layout": { - "format": "standard" 
- }}, + "archive_type": "zip", + "archive_filename": "string" + }, "tools": [ - { - "anchor_item": "string", - "method": "string", - "anchor_bundle": "string", - "strict": true - } + {"file_format": "COG"}, + {"toar": {"scale_factor": 10000}} ] } diff --git a/tests/data/orders_page.json b/tests/data/orders_page.json new file mode 100644 index 000000000..9fd4c7631 --- /dev/null +++ b/tests/data/orders_page.json @@ -0,0 +1,52 @@ +{ + "_links": { + "_self": "https://api.planet.com/compute/ops/orders/v2", + "next": "https://api.planet.com/compute/ops/orders/v2?page_marker=2021-01-17T02%3A06%3A01.706517Z" + }, + "orders": [ + { + "_links": { + "_self": "https://api.planet.com/compute/ops/orders/v2/f05b1ed7-11f0-43da-960c-a624f7c355c8" + }, + "created_on": "2021-02-03T01:40:08.332Z", + "error_hints": [], + "id": "f05b1ed7-11f0-43da-960c-a624f7c355c8", + "last_message": "Manifest delivery completed", + "last_modified": "2021-02-03T01:43:31.015Z", + "name": "test_order", + "products": [ + { + "item_ids": [ + "3949357_1454705_2020-12-01_241c", + "3949357_1454805_2020-12-01_241c" + ], + "item_type": "PSOrthoTile", + "product_bundle": "analytic" + } + ], + "state": "success" + }, + { + "_links": { + "_self": "https://api.planet.com/compute/ops/orders/v2/8d4799c4-5291-40c0-a7f5-adb9a974455d" + }, + "created_on": "2021-02-03T01:40:07.359Z", + "error_hints": [], + "id": "8d4799c4-5291-40c0-a7f5-adb9a974455d", + "last_message": "Manifest delivery completed", + "last_modified": "2021-02-03T01:43:31.007Z", + "name": "test_order", + "products": [ + { + "item_ids": [ + "3949357_1454705_2020-12-01_241c", + "3949357_1454805_2020-12-01_241c" + ], + "item_type": "PSOrthoTile", + "product_bundle": "analytic" + } + ], + "state": "success" + } + ] +} diff --git a/tests/integration/test_orders_api.py b/tests/integration/test_orders_api.py index e49574d4c..4b2dbfeb1 100644 --- a/tests/integration/test_orders_api.py +++ b/tests/integration/test_orders_api.py @@ -13,65 +13,50 @@ # the License. 
import copy import json +from http import HTTPStatus import logging +import math import os from pathlib import Path -import pytest - -from planet.api import OrdersClient +import httpx +import pytest +import respx -DATA_DIR = Path(os.path.dirname(__file__)).parents[0] / 'data' +from planet import OrdersClient, Session -LOGGER = logging.getLogger(__name__) -# if use mock:// as the prefix, the params get lost -# https://github.com/jamielennox/requests-mock/issues/142 TEST_URL = 'http://MockNotRealURL/' - -@pytest.fixture() -def orders_client(): - return OrdersClient(api_key='doesntmatter', base_url=TEST_URL) - - -@pytest.fixture -def order_description(): - order_name = 'order_description_b0cb3448-0a74-11eb-92a1-a3d779bb08e0.json' - order_filename = DATA_DIR / order_name - return json.load(open(order_filename, 'r')) +LOGGER = logging.getLogger(__name__) @pytest.fixture -def order_details(): - order_name = 'order_details_psorthotile_analytic.json' - order_filename = DATA_DIR / order_name - return json.load(open(order_filename, 'r')) +@pytest.mark.asyncio +async def session(): + async with Session() as ps: + yield ps @pytest.fixture -def oid(): - return 'b0cb3448-0a74-11eb-92a1-a3d779bb08e0' - - -def test_get_order(requests_mock, orders_client, oid, order_description): - get_url = TEST_URL + 'orders/v2/' + oid - requests_mock.get(get_url, status_code=200, json=order_description) - state = orders_client.get_order(oid).state - assert state == 'queued' - - -def test_list_orders(requests_mock, orders_client, order_description): - list_url = TEST_URL + 'orders/v2/' - next_page_url = list_url + '?page_marker=IAmATest' - - order1 = copy.deepcopy(order_description) +def order_descriptions(order_description): + order1 = order_description order1['id'] = 'oid1' order2 = copy.deepcopy(order_description) order2['id'] = 'oid2' order3 = copy.deepcopy(order_description) order3['id'] = 'oid3' + return [order1, order2, order3] + + +@respx.mock +@pytest.mark.asyncio +async def test_list_orders_basic(order_descriptions, session): + list_url = TEST_URL + 'orders/v2/' + next_page_url = list_url + 'blob/?page_marker=IAmATest' + + order1, order2, order3 = order_descriptions page1_response = { "_links": { @@ -79,27 +64,30 @@ def test_list_orders(requests_mock, orders_client, order_description): "next": next_page_url}, "orders": [order1, order2] } - requests_mock.get(list_url, status_code=200, json=page1_response) + mock_resp1 = httpx.Response(HTTPStatus.OK, json=page1_response) + respx.get(list_url).return_value = mock_resp1 page2_response = { "_links": { "_self": next_page_url}, "orders": [order3] } - requests_mock.get(next_page_url, status_code=200, json=page2_response) + mock_resp2 = httpx.Response(HTTPStatus.OK, json=page2_response) + respx.get(next_page_url).return_value = mock_resp2 + + cl = OrdersClient(session, base_url=TEST_URL) + orders = await cl.list_orders() - orders = orders_client.list_orders() oids = list(o.id for o in orders) assert oids == ['oid1', 'oid2', 'oid3'] -def test_list_orders_state(requests_mock, orders_client, order_description): +@respx.mock +@pytest.mark.asyncio +async def test_list_orders_state(order_descriptions, session): list_url = TEST_URL + 'orders/v2/?state=failed' - order1 = copy.deepcopy(order_description) - order1['id'] = 'oid1' - order2 = copy.deepcopy(order_description) - order2['id'] = 'oid2' + order1, order2, _ = order_descriptions page1_response = { "_links": { @@ -107,62 +95,101 @@ def test_list_orders_state(requests_mock, orders_client, order_description): }, "orders": 
[order1, order2] } - requests_mock.get(list_url, status_code=200, json=page1_response) + mock_resp = httpx.Response(HTTPStatus.OK, json=page1_response) + respx.get(list_url).return_value = mock_resp + + cl = OrdersClient(session, base_url=TEST_URL) + orders = await cl.list_orders(state='failed') - orders = orders_client.list_orders(state='failed') oids = list(o.id for o in orders) assert oids == ['oid1', 'oid2'] -def test_list_orders_limit(requests_mock, orders_client, order_description): - list_url = TEST_URL + 'orders/v2/' - next_page_url = list_url + '?page_marker=IAmATest' - - order1 = copy.deepcopy(order_description) - order1['id'] = 'oid1' - order2 = copy.deepcopy(order_description) - order2['id'] = 'oid2' - order3 = copy.deepcopy(order_description) - order3['id'] = 'oid3' - +@respx.mock +@pytest.mark.asyncio +async def test_list_orders_limit(order_descriptions, session): # check that the client doesn't try to get the next page when the # limit is already reached by providing link to next page but not # registering a response. if the client tries to get the next # page, an error will occur + + list_url = TEST_URL + 'orders/v2/' + nono_page_url = list_url + '?page_marker=OhNoNo' + + order1, order2, order3 = order_descriptions + page1_response = { "_links": { "_self": "string", - "next": next_page_url}, + "next": nono_page_url}, "orders": [order1, order2] } - requests_mock.get(list_url, status_code=200, json=page1_response) + mock_resp = httpx.Response(HTTPStatus.OK, json=page1_response) - orders = orders_client.list_orders(limit=1) - oids = list(o.id for o in orders) + page2_response = { + "_links": { + "_self": "string", + }, + "orders": [order3] + } + mock_resp2 = httpx.Response(HTTPStatus.OK, json=page2_response) + + respx.route(method="GET", url__eq=list_url).mock(return_value=mock_resp) + nono_route = respx.route(method="GET", url__eq=nono_page_url).mock( + return_value=mock_resp2) + + cl = OrdersClient(session, base_url=TEST_URL) + orders = await cl.list_orders(limit=1) + + assert not nono_route.called + oids = [o.id for o in orders] assert oids == ['oid1'] -def test_create_order(requests_mock, orders_client, oid, order_description, - order_details): +@respx.mock +@pytest.mark.asyncio +async def test_create_order(oid, order_description, order_details, session): create_url = TEST_URL + 'orders/v2/' - requests_mock.post(create_url, status_code=200, json=order_description) + mock_resp = httpx.Response(HTTPStatus.OK, json=order_description) + respx.post(create_url).return_value = mock_resp + + cl = OrdersClient(session, base_url=TEST_URL) + created_oid = await cl.create_order(order_details) - created_oid = orders_client.create_order(order_details) assert created_oid == oid -def test_cancel_order(requests_mock, orders_client, oid): - # TODO: the api says cancel order returns the order details but as - # far as I can test thus far, it returns nothing. 
follow up on this +@respx.mock +@pytest.mark.asyncio +async def test_get_order(oid, order_description, session): + get_url = TEST_URL + 'orders/v2/' + oid + mock_resp = httpx.Response(HTTPStatus.OK, json=order_description) + respx.get(get_url).return_value = mock_resp + + cl = OrdersClient(session, base_url=TEST_URL) + order = await cl.get_order(oid) + + assert order.state == 'queued' + + +@respx.mock +@pytest.mark.asyncio +async def test_cancel_order(oid, order_description, session): cancel_url = TEST_URL + 'orders/v2/' + oid - requests_mock.put(cancel_url, status_code=200, text='') + order_description['state'] = 'cancelled' + mock_resp = httpx.Response(HTTPStatus.OK, json=order_description) + respx.put(cancel_url).return_value = mock_resp - orders_client.cancel_order(oid) + # TODO: the api says cancel order returns the order details but as + # far as I can test thus far, it returns nothing. follow up on this + cl = OrdersClient(session, base_url=TEST_URL) + await cl.cancel_order(oid) -def test_cancel_orders(requests_mock, orders_client): +@respx.mock +@pytest.mark.asyncio +async def test_cancel_orders_by_ids(session): bulk_cancel_url = TEST_URL + 'bulk/orders/v2/cancel' - test_ids = ["oid1", "oid2", "oid3"] example_result = { "result": { @@ -178,19 +205,24 @@ def test_cancel_orders(requests_mock, orders_client): } } } - requests_mock.post(bulk_cancel_url, status_code=200, json=example_result) + mock_resp = httpx.Response(HTTPStatus.OK, json=example_result) + respx.post(bulk_cancel_url).return_value = mock_resp + + cl = OrdersClient(session, base_url=TEST_URL) + res = await cl.cancel_orders(test_ids) - res = orders_client.cancel_orders(test_ids) assert res == example_result expected_body = { "order_ids": test_ids } - history = requests_mock.request_history - assert history[0].json() == expected_body + actual_body = json.loads(respx.calls.last.request.content) + assert actual_body == expected_body -def test_cancel_orders_all(requests_mock, orders_client): +@respx.mock +@pytest.mark.asyncio +async def test_cancel_orders_all(session): bulk_cancel_url = TEST_URL + 'bulk/orders/v2/cancel' example_result = { @@ -202,16 +234,52 @@ def test_cancel_orders_all(requests_mock, orders_client): } } } - requests_mock.post(bulk_cancel_url, status_code=200, json=example_result) + mock_resp = httpx.Response(HTTPStatus.OK, json=example_result) + respx.post(bulk_cancel_url).return_value = mock_resp + + cl = OrdersClient(session, base_url=TEST_URL) + res = await cl.cancel_orders() - res = orders_client.cancel_orders([]) assert res == example_result - history = requests_mock.request_history - assert history[0].json() == {} + actual_body = json.loads(respx.calls.last.request.content) + assert actual_body == {} -def test_aggegated_order_stats(requests_mock, orders_client): +@respx.mock +@pytest.mark.asyncio +async def test_poll(oid, order_description, session): + get_url = TEST_URL + 'orders/v2/' + oid + + order_description2 = copy.deepcopy(order_description) + order_description2['state'] = 'running' + order_description3 = copy.deepcopy(order_description) + order_description3['state'] = 'success' + + cl = OrdersClient(session, base_url=TEST_URL) + + route = respx.get(get_url) + route.side_effect = [ + httpx.Response(HTTPStatus.OK, json=order_description), + httpx.Response(HTTPStatus.OK, json=order_description2), + httpx.Response(HTTPStatus.OK, json=order_description3) + ] + state = await cl.poll(oid, wait=0) + assert state == 'success' + + route = respx.get(get_url) + route.side_effect = [ + 
httpx.Response(HTTPStatus.OK, json=order_description), + httpx.Response(HTTPStatus.OK, json=order_description2), + httpx.Response(HTTPStatus.OK, json=order_description3) + ] + state = await cl.poll(oid, state='running', wait=0) + assert state == 'running' + + +@respx.mock +@pytest.mark.asyncio +async def test_aggegated_order_stats(session): stats_url = TEST_URL + 'stats/orders/v2/' LOGGER.debug(f'url: {stats_url}') example_stats = { @@ -224,47 +292,72 @@ def test_aggegated_order_stats(requests_mock, orders_client): "running_orders": 0 } } - requests_mock.get(stats_url, status_code=200, json=example_stats) + mock_resp = httpx.Response(HTTPStatus.OK, json=example_stats) + respx.get(stats_url).return_value = mock_resp + + cl = OrdersClient(session, base_url=TEST_URL) + res = await cl.aggregated_order_stats() - res = orders_client.aggregated_order_stats() assert res == example_stats -def test_download_asset(requests_mock, tmpdir, orders_client, open_test_img): +@respx.mock +@pytest.mark.asyncio +async def test_download_asset_md(tmpdir, session): dl_url = TEST_URL + 'download/?token=IAmAToken' - with open_test_img as img: - requests_mock.get( - dl_url, - status_code=200, - body=img, - headers={ - 'Content-Type': 'image/tiff', - 'Content-Length': '527', - 'Content-Disposition': 'attachment; filename="img.tif"' - }) - - filename = orders_client.download_asset( - dl_url, directory=str(tmpdir)) - assert Path(filename).name == 'img.tif' - assert os.path.isfile(filename) - - requests_mock.get( - dl_url, - status_code=200, - json={'key': 'value'}, - headers={ - 'Content-Type': 'application/json', - 'Content-Disposition': 'attachment; filename="metadata.json"' - }) + md_json = {'key': 'value'} + md_headers = { + 'Content-Type': 'application/json', + 'Content-Disposition': 'attachment; filename="metadata.json"' + } + mock_resp = httpx.Response(HTTPStatus.OK, json=md_json, headers=md_headers) + respx.get(dl_url).return_value = mock_resp + + cl = OrdersClient(session, base_url=TEST_URL) + filename = await cl.download_asset(dl_url, directory=str(tmpdir)) - filename = orders_client.download_asset( - dl_url, directory=str(tmpdir)) assert json.loads(open(filename).read()) == {'key': 'value'} + assert Path(filename).name == 'metadata.json' + + +@respx.mock +@pytest.mark.asyncio +async def test_download_asset_img(tmpdir, open_test_img, session): + dl_url = TEST_URL + 'download/?token=IAmAToken' + + img_headers = { + 'Content-Type': 'image/tiff', + 'Content-Length': '527', + 'Content-Disposition': 'attachment; filename="img.tif"' + } + + async def _stream_img(): + data = open_test_img.read() + v = memoryview(data) + chunksize = 100 + for i in range(math.ceil(len(v)/(chunksize))): + yield v[i*chunksize:min((i+1)*chunksize, len(v))] -def test_download_order(requests_mock, tmpdir, orders_client, - order_description, oid): + # populate request parameter to avoid respx cloning, which throws + # an error caused by respx and not this code + # https://github.com/lundberg/respx/issues/130 + mock_resp = httpx.Response(HTTPStatus.OK, stream=_stream_img(), + headers=img_headers, + request='donotcloneme') + respx.get(dl_url).return_value = mock_resp + + cl = OrdersClient(session, base_url=TEST_URL) + filename = await cl.download_asset(dl_url, directory=str(tmpdir)) + + assert Path(filename).name == 'img.tif' + assert os.path.isfile(filename) + + +@respx.mock +@pytest.mark.asyncio +async def test_download_order(tmpdir, order_description, oid, session): dl_url1 = TEST_URL + 'download/1?token=IAmAToken' dl_url2 = TEST_URL + 
'download/2?token=IAmAnotherToken' order_description['_links']['results'] = [ @@ -273,52 +366,34 @@ def test_download_order(requests_mock, tmpdir, orders_client, ] get_url = TEST_URL + 'orders/v2/' + oid - requests_mock.get(get_url, status_code=200, json=order_description) + mock_resp = httpx.Response(HTTPStatus.OK, json=order_description) + respx.get(get_url).return_value = mock_resp - requests_mock.get( - dl_url1, - status_code=200, + mock_resp1 = httpx.Response( + HTTPStatus.OK, json={'key': 'value'}, headers={ 'Content-Type': 'application/json', 'Content-Disposition': 'attachment; filename="m1.json"' }) + respx.get(dl_url1).return_value = mock_resp1 - requests_mock.get( - dl_url2, - status_code=200, + mock_resp1 = httpx.Response( + HTTPStatus.OK, json={'key2': 'value2'}, headers={ 'Content-Type': 'application/json', 'Content-Disposition': 'attachment; filename="m2.json"' }) + respx.get(dl_url2).return_value = mock_resp1 - filenames = orders_client.download_order(oid, directory=str(tmpdir)) - assert len(filenames) == 2 - assert json.loads(open(filenames[0]).read()) == {'key': 'value'} - assert json.loads(open(filenames[1]).read()) == {'key2': 'value2'} + cl = OrdersClient(session, base_url=TEST_URL) + filenames = await cl.download_order(oid, directory=str(tmpdir)) + assert len(filenames) == 2 -def test_wait_for_complete(requests_mock, oid, orders_client, - order_description): - get_url = TEST_URL + 'orders/v2/' + oid - - order_description2 = copy.deepcopy(order_description) - order_description2['state'] = 'running' - order_description3 = copy.deepcopy(order_description) - order_description3['state'] = 'success' - - requests_mock.get(get_url, [ - {'json': order_description, 'status_code': 200}, - {'json': order_description2, 'status_code': 200}, - {'json': order_description3, 'status_code': 200}]) - - states = [] - - def _callback(state): - states.append(state) + assert json.loads(open(filenames[0]).read()) == {'key': 'value'} + assert Path(filenames[0]).name == 'm1.json' - state = orders_client.wait_for_complete(oid, wait=0, callback=_callback) - assert state == 'success' - assert requests_mock.call_count == 3 - assert states == ['queued', 'running', 'success'] + assert json.loads(open(filenames[1]).read()) == {'key2': 'value2'} + assert Path(filenames[1]).name == 'm2.json' diff --git a/tests/unit/test_http.py b/tests/unit/test_http.py index d5d07856c..1e9f53ae4 100644 --- a/tests/unit/test_http.py +++ b/tests/unit/test_http.py @@ -11,94 +11,106 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. 
-from mock import Mock +import logging +from http import HTTPStatus +from unittest.mock import Mock + +import httpx +import respx import pytest -import requests_mock -from planet.api import http, models +from planet.api import exceptions, http TEST_URL = 'mock://fantastic.com' +LOGGER = logging.getLogger(__name__) + @pytest.fixture def mock_request(): r = Mock() - r.url = TEST_URL - r.body_type = models.Body - r.method = 'GET' - r.headers = {} - r.params = None - r.data = None - + r.http_request = httpx.Request( + 'GET', + TEST_URL) yield r -@pytest.fixture -def throttle_adapter(): - adapter = requests_mock.Adapter() - responses = [ - {'json': {'msg': 'msg'}, 'status_code': 429}, - {'json': {'msg': 'msg'}, 'status_code': 200} - ] - adapter.register_uri('GET', TEST_URL, responses) - yield adapter - - -def test_planetsession_contextmanager(): - with http.PlanetSession(): +@pytest.mark.asyncio +async def test_session_contextmanager(): + async with http.Session(): pass -def test_planetsession_request_retry(mock_request, throttle_adapter): - with http.PlanetSession() as ps: - # needed to redirect calls to the adapter - ps._session.mount('mock://', throttle_adapter) +@respx.mock +@pytest.mark.asyncio +async def test_session_request(mock_request): + async with http.Session() as ps: + mock_resp = httpx.Response(HTTPStatus.OK, text='bubba') + respx.get(TEST_URL).return_value = mock_resp + + resp = await ps.request(mock_request) + assert resp.http_response.text == 'bubba' + + +@respx.mock +@pytest.mark.asyncio +async def test_session_stream(mock_request): + async with http.Session() as ps: + mock_resp = httpx.Response(HTTPStatus.OK, text='bubba') + respx.get(TEST_URL).return_value = mock_resp + + async with ps.stream(mock_request) as resp: + txt = await resp.http_response.aread() + assert txt == b'bubba' + + +@pytest.mark.asyncio +async def test_session__raise_for_status(): + await http.Session._raise_for_status(Mock( + status_code=HTTPStatus.CREATED, text='' + )) + + with pytest.raises(exceptions.BadQuery): + await http.Session._raise_for_status(Mock( + status_code=HTTPStatus.BAD_REQUEST, text='' + )) + + with pytest.raises(exceptions.TooManyRequests): + await http.Session._raise_for_status(Mock( + status_code=HTTPStatus.TOO_MANY_REQUESTS, text='' + )) + + with pytest.raises(exceptions.OverQuota): + await http.Session._raise_for_status(Mock( + status_code=HTTPStatus.TOO_MANY_REQUESTS, text='exceeded QUOTA' + )) + + +@respx.mock +@pytest.mark.asyncio +async def test_session_request_retry(mock_request): + async with http.Session() as ps: + route = respx.get(TEST_URL) + route.side_effect = [ + httpx.Response(HTTPStatus.TOO_MANY_REQUESTS), + httpx.Response(HTTPStatus.OK) + ] ps.retry_wait_time = 0 # lets not slow down tests for this - resp = ps.request(mock_request) + resp = await ps.request(mock_request) assert resp + assert route.call_count == 2 + +@respx.mock +@pytest.mark.asyncio +async def test_session_retry(mock_request): + async with http.Session() as ps: + async def test_func(): + raise exceptions.TooManyRequests -def test_redirectsession_rebuilt_auth_called(): - '''verify our hacking around with Session behavior works''' - session = http.RedirectSession() - with requests_mock.Mocker() as m: - m.get('http://redirect.com', status_code=302, headers={ - 'Location': 'http://newredirect.com' - }) - m.get('http://newredirect.com', text='redirected!') - - # base assertion, works as intended - resp = session.get('http://redirect.com') - assert resp.url == 'http://newredirect.com' - assert resp.text == 
'redirected!' - - # Authorization headers unpacked and URL is rewritten - resp = session.get('http://redirect.com', headers={ - 'Authorization': 'api-key foobar' - }) - assert resp.url == 'http://newredirect.com/?api_key=foobar' - assert resp.text == 'redirected!' - - # Authorization headers unpacked and URL is rewritten, params saved - m.get('http://redirect.com', status_code=302, headers={ - 'Location': 'http://newredirect.com?param=yep' - }) - m.get('http://newredirect.com?param=yep', text='param!') - resp = session.get('http://redirect.com?param=yep', headers={ - 'Authorization': 'api-key foobar' - }) - assert resp.url == 'http://newredirect.com/?param=yep&api_key=foobar' - assert resp.text == 'param!' - - -def test_redirectsession_is_subdomain_of_tld(): - fcn = http.RedirectSession._is_subdomain_of_tld - assert fcn('http://foo.bar', 'http://foo.bar') - assert fcn('http://one.foo.bar', 'http://foo.bar') - assert fcn('http://foo.bar', 'http://one.foo.bar') - assert not fcn('http://foo.bar', 'http://bar.foo') - assert not fcn('http://one.foo.bar', 'http://bar.foo') - assert not fcn('http://foo.bar', 'http://one.bar.foo') + ps.retry_wait_time = 0 + with pytest.raises(http.SessionException): + await ps.retry(test_func) diff --git a/tests/unit/test_models.py b/tests/unit/test_models.py index 678935a06..8c6ac4708 100644 --- a/tests/unit/test_models.py +++ b/tests/unit/test_models.py @@ -11,21 +11,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import io +# import io +import copy import logging import math -from mock import MagicMock +from unittest.mock import MagicMock import os from pathlib import Path +import re +from httpx import URL import pytest -from planet.api import exceptions, models +from planet.api import models -TEST_ITEM_KEY = 'testitem' -TEST_LINKS_KEY = 'testlinks' -TEST_NEXT_KEY = 'testnext' -NUM_ITEMS = 5 LOGGER = logging.getLogger(__name__) @@ -39,163 +38,194 @@ def mock_http_response(json=None, iter_content=None, text=None): m = MagicMock(name='http_response') m.headers = {} m.json.return_value = json or {} - m.iter_content = iter_content + m.aiter_content = iter_content m.text = text or '' return m -def test_Request__raise_for_status(): - models.Response._raise_for_status(201, mock_http_response(text='')) - - with pytest.raises(exceptions.TooManyRequests): - models.Response._raise_for_status(429, mock_http_response(text='')) - - with pytest.raises(exceptions.OverQuota): - msg = 'exceeded QUOTA dude' - models.Response._raise_for_status(429, mock_http_response(text=msg)) - - -def test_Body_write(tmpdir, mocked_request): - chunks = ((str(i) * 16000).encode('utf-8') for i in range(10)) - - body = models.Body(mocked_request, mock_http_response( - iter_content=lambda chunk_size: chunks - )) - buf = io.BytesIO() - body.write(buf) - - assert len(buf.getvalue()) == 160000 - - -def test_Body_write_img(requests_mock, tmpdir, mocked_request, open_test_img): - data = open_test_img.read() - v = memoryview(data) - - chunksize = 100 - chunks = (v[i*chunksize:min((i+1)*chunksize, len(v))] - for i in range(math.ceil(len(v)/(chunksize)))) - - body = models.Body(mocked_request, mock_http_response( - iter_content=lambda chunk_size: chunks - )) +def test_StreamingBody_name(): + r = MagicMock(name='response') + r.request.url = URL('https://planet.com/path/to/example.tif?foo=f6f1') + hr = MagicMock(name='http_response') + hr.headers = { + 'date': 'Thu, 14 Feb 
2019 16:13:26 GMT', + 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', + 'accept-ranges': 'bytes', + 'content-type': 'image/tiff', + 'content-length': '57350256', + 'content-disposition': 'attachment; filename="open_california.tif"' + } + r.http_response = hr + body = models.StreamingBody(r) + + assert body.name == 'open_california.tif' + + r = MagicMock(name='response') + r.request.url = URL('https://planet.com/path/to/example.tif?foo=f6f1') + hr = MagicMock(name='http_response') + hr.headers = { + 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', + 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', + 'accept-ranges': 'bytes', + 'content-type': 'image/tiff', + 'content-length': '57350256', + } + r.http_response = hr + body = models.StreamingBody(r) + + assert body.name == 'example.tif' + + r = MagicMock(name='response') + r.request.url = URL('https://planet.com/path/to/noname/') + hr = MagicMock(name='http_response') + hr.headers = { + 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', + 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', + 'accept-ranges': 'bytes', + 'content-type': 'image/tiff', + 'content-length': '57350256', + } + r.http_response = hr + body = models.StreamingBody(r) + + assert body.name.startswith('planet-') + assert (body.name.endswith('.tiff') or + body.name.endswith('.tif')) + + +@pytest.mark.parametrize('headers,expected', [ + ({ + 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', + 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', + 'accept-ranges': 'bytes', + 'content-type': 'image/tiff', + 'content-length': '57350256', + 'content-disposition': 'attachment; filename="open_california.tif"' + }, 'open_california.tif'), + ({ + 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', + 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', + 'accept-ranges': 'bytes', + 'content-type': 'image/tiff', + 'content-length': '57350256' + }, None), + ({}, None) +]) +def test__get_filename_from_headers(headers, expected): + assert models._get_filename_from_headers(headers) == expected + + +@pytest.mark.parametrize('url,expected', [ + (URL('https://planet.com/'), None), + (URL('https://planet.com/path/to/'), None), + (URL('https://planet.com/path/to/example.tif'), 'example.tif'), + (URL('https://planet.com/path/to/example.tif?foo=f6f1&bar=baz'), + 'example.tif'), + (URL('https://planet.com/path/to/example.tif?foo=f6f1#quux'), + 'example.tif'), +]) +def test__get_filename_from_url(url, expected): + assert models._get_filename_from_url(url) == expected + + +@pytest.mark.parametrize('content_type,check', [ + (None, lambda x: re.match(r'^planet-[a-z0-9]{8}$', x, re.I) is not None), + ('image/tiff', lambda x: x.endswith(('.tif', '.tiff'))), +]) +def test__get_random_filename(content_type, check): + assert check(models._get_random_filename(content_type)) + + +@pytest.mark.asyncio +async def test_StreamingBody_write_img(tmpdir, mocked_request, open_test_img): + async def _aiter_bytes(): + data = open_test_img.read() + v = memoryview(data) + + chunksize = 100 + for i in range(math.ceil(len(v)/(chunksize))): + yield v[i*chunksize:min((i+1)*chunksize, len(v))] + + r = MagicMock(name='response') + hr = MagicMock(name='http_response') + hr.aiter_bytes = _aiter_bytes + hr.num_bytes_downloaded = 0 + hr.headers['Content-Length'] = 527 + r.http_response = hr + body = models.StreamingBody(r) filename = Path(str(tmpdir)) / 'test.tif' - body.write(file=filename) + await body.write(filename, progress_bar=False) assert os.path.isfile(filename) assert os.stat(filename).st_size == 527 -def test_Body_write_to_file_callback(mocked_request, tmpdir): 
- class Tracker(object): - def __init__(self): - self.calls = [] - - def get_callback(self): - def register_call(start=None, wrote=None, total=None, finish=None, - skip=None): - if start is not None: - self.calls.append('start') - if wrote is not None and total is not None: - self.calls.append('wrote, total') - if finish is not None: - self.calls.append('finish') - if skip is not None: - self.calls.append('skip') - return register_call +@pytest.fixture +def get_pages(): + p1 = {'links': {'next': 'blah'}, + 'items': [1, 2]} + p2 = {'links': {}, + 'items': [3, 4]} + responses = [ + mock_http_response(json=p1), + mock_http_response(json=p2) + ] - chunks = ((str(i) * 16000).encode('utf-8') for i in range(2)) + async def do_get(req): + return responses.pop(0) - body = models.Body(mocked_request, mock_http_response( - iter_content=lambda chunk_size: chunks - )) + return do_get - test = Tracker() - filename = Path(str(tmpdir)) / 'test.tif' - body.write_to_file(filename=filename, callback=test.get_callback()) - assert test.calls == ['start', 'wrote, total', 'wrote, total', 'finish'] +@pytest.mark.asyncio +async def test_Paged_iterator(get_pages): + req = MagicMock() + paged = models.Paged(req, get_pages) + assert [1, 2, 3, 4] == [i async for i in paged] - # should skip writing the file because a file with that filename already - # exists - test.calls = [] - body.write_to_file(filename=filename, callback=test.get_callback(), - overwrite=False) - assert test.calls == ['skip'] +@pytest.mark.asyncio +async def test_Paged_limit(get_pages): + req = MagicMock() + paged = models.Paged(req, get_pages, limit=3) + assert [1, 2, 3] == [i async for i in paged] -# class TestPaged(models.Paged): -# def _get_item_key(self): -# return TEST_ITEM_KEY -# -# def _get_links_key(self): -# return TEST_LINKS_KEY -# -# def _get_next_key(self): -# return TEST_NEXT_KEY -# -# -# @pytest.fixture -# def test_paged(): -# request = models.Request('url', 'auth') -# -# # make 5 pages with 5 items on each page -# pages = _make_pages(5, NUM_ITEMS) -# http_response = mock_http_response(json=next(pages)) -# -# # initialize the paged object with the first page -# paged = TestPaged(request, http_response) -# -# # the remaining 4 get used here -# ps = MagicMock(name='PlanetSession') -# ps.request.side_effect = ( -# mock_http_response(json=p) for p in pages -# ) -# # mimic dispatcher.response -# return paged -# -# -# def _make_pages(cnt, num): -# '''generator of 'cnt' pages containing 'num' content''' -# start = 0 -# for p in range(num): -# nxt = 'page %d' % (p + 1,) if p + 1 < num else None -# start, page = _make_test_page(cnt, start, nxt) -# yield page -# -# -# def _make_test_page(cnt, start, nxt): -# '''fake paged content''' -# envelope = { -# TEST_LINKS_KEY: { -# TEST_NEXT_KEY: nxt -# }, -# TEST_ITEM_KEY: [{ -# 'testitementry': start + t -# } for t in range(cnt)] -# } -# return start + cnt, envelope -# -# def test_Paged_next(test_paged): -# pages = list(test_paged.iter(2)) -# assert 2 == len(pages) -# assert NUM_ITEMS == len(pages[0].get()[TEST_ITEM_KEY]) -# assert NUM_ITEMS == len(pages[1].get()[TEST_ITEM_KEY]) -# -# -# def test_Paged_iter(test_paged): -# pages = list(test_paged.iter(2)) -# assert 2 == len(pages) -# assert NUM_ITEMS == len(pages[0].get()[TEST_ITEM_KEY]) -# assert NUM_ITEMS == len(pages[1].get()[TEST_ITEM_KEY]) -# -# -# @pytest.mark.skip(reason='not implemented') -# def test_Paged_items_iter(): -# pass -# -# -# @pytest.mark.skip(reason='not implemented') -# def test_Paged_json_encode(): -# pass +@pytest.fixture +def 
get_orders_pages(orders_page): + page2 = copy.deepcopy(orders_page) + del page2['_links']['next'] + responses = [ + mock_http_response(json=orders_page), + mock_http_response(json=page2) + ] + + async def do_get(req): + return responses.pop(0) + + return do_get + + +@pytest.mark.asyncio +async def test_Orders(get_orders_pages): + req = MagicMock() + orders = models.Orders(req, get_orders_pages) + expected_ids = [ + 'f05b1ed7-11f0-43da-960c-a624f7c355c8', + '8d4799c4-5291-40c0-a7f5-adb9a974455d', + 'f05b1ed7-11f0-43da-960c-a624f7c355c8', + '8d4799c4-5291-40c0-a7f5-adb9a974455d' + ] + assert expected_ids == [o.id async for o in orders] + + +def test_Order_results(order_description): + order = models.Order(order_description) + assert len(order.results) == 3 + + +def test_Order_locations(order_description): + order = models.Order(order_description) + expected_locations = ['location1', 'location2', 'location3'] + assert order.locations == expected_locations diff --git a/tests/unit/test_order_details.py b/tests/unit/test_order_details.py new file mode 100644 index 000000000..5ef70e284 --- /dev/null +++ b/tests/unit/test_order_details.py @@ -0,0 +1,357 @@ +# Copyright 2020 Planet Labs, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging + +import pytest + +from planet import specs +from planet.api import order_details + +LOGGER = logging.getLogger(__name__) + +TEST_ID = 'doesntmatter' +TEST_PRODUCT_BUNDLE = 'analytic_sr' +TEST_FALLBACK_BUNDLE = 'analytic' +TEST_ITEM_TYPE = 'PSOrthoTile' +TEST_ARCHIVE_FILENAME = '{{name}}_b_{order_id}}.zip' + + +def test_OrderDetails(): + test_product = order_details.Product( + [TEST_ID], TEST_PRODUCT_BUNDLE, TEST_ITEM_TYPE) + _ = order_details.OrderDetails('test', [test_product]) + + _ = order_details.OrderDetails( + 'test', + [test_product], + order_details.Delivery(archive_type='zip'), + order_details.Notifications(email=True), + [order_details.Tool('file_format', 'COG')] + ) + + +def test_OrderDetails_from_dict(): + min_details = { + 'name': 'test', + 'products': [ + { + 'item_ids': [TEST_ID], + 'item_type': TEST_ITEM_TYPE, + 'product_bundle': TEST_PRODUCT_BUNDLE + } + ], + } + _ = order_details.OrderDetails.from_dict(min_details) + + details = { + 'name': 'test', + 'products': [ + { + 'item_ids': [TEST_ID], + 'item_type': TEST_ITEM_TYPE, + 'product_bundle': TEST_PRODUCT_BUNDLE + } + ], + 'subscription_id': 1, + 'delivery': {'archive_type': 'zip'}, + 'notifications': {'email': True}, + 'tools': [ + {'file_format': 'COG'}, + {'toar': {'scale_factor': 10000}} + ] + } + + od = order_details.OrderDetails.from_dict(details) + assert od.subscription_id == 1 + assert od.delivery.archive_type == 'zip' + assert od.notifications.email + assert od.tools[0].name == 'file_format' + assert od.tools[0].parameters == 'COG' + assert od.tools[1].name == 'toar' + assert od.tools[1].parameters == {'scale_factor': 10000} + + +def test_OrderDetails_to_dict(): + test_product = order_details.Product( + [TEST_ID], TEST_PRODUCT_BUNDLE, TEST_ITEM_TYPE) + + od = 
order_details.OrderDetails(
+        'test',
+        [test_product],
+        subscription_id=1,
+        delivery=order_details.Delivery(archive_type='zip'),
+        notifications=order_details.Notifications(email=True),
+        tools=[order_details.Tool('file_format', 'COG')]
+    )
+
+    expected = {
+        'name': 'test',
+        'products': [
+            {
+                'item_ids': [TEST_ID],
+                'item_type': TEST_ITEM_TYPE,
+                'product_bundle': TEST_PRODUCT_BUNDLE
+            }
+        ],
+        'subscription_id': 1,
+        'delivery': {'archive_type': 'zip'},
+        'notifications': {'email': True},
+        'tools': [{'file_format': 'COG'}]
+    }
+    assert expected == od.to_dict()
+
+
+def test_Product():
+    _ = order_details.Product([TEST_ID], TEST_PRODUCT_BUNDLE, TEST_ITEM_TYPE,
+                              fallback_bundle=TEST_FALLBACK_BUNDLE)
+
+    with pytest.raises(specs.SpecificationException):
+        _ = order_details.Product([TEST_ID],
+                                  'notsupported',
+                                  TEST_ITEM_TYPE,
+                                  fallback_bundle=TEST_FALLBACK_BUNDLE)
+
+    with pytest.raises(specs.SpecificationException):
+        _ = order_details.Product([TEST_ID],
+                                  TEST_PRODUCT_BUNDLE,
+                                  'notsupported',
+                                  fallback_bundle=TEST_FALLBACK_BUNDLE)
+
+    with pytest.raises(specs.SpecificationException):
+        _ = order_details.Product([TEST_ID],
+                                  TEST_PRODUCT_BUNDLE,
+                                  TEST_ITEM_TYPE,
+                                  fallback_bundle='notsupported')
+
+
+def test_Product_from_dict():
+    test_details = {
+        'item_ids': [TEST_ID],
+        'item_type': TEST_ITEM_TYPE,
+        'product_bundle': f'{TEST_PRODUCT_BUNDLE},{TEST_FALLBACK_BUNDLE}'
+    }
+
+    p = order_details.Product.from_dict(test_details)
+    assert p.item_ids == [TEST_ID]
+    assert p.item_type == TEST_ITEM_TYPE
+    assert p.product_bundle == TEST_PRODUCT_BUNDLE
+    assert p.fallback_bundle == TEST_FALLBACK_BUNDLE
+
+
+def test_Product_to_dict():
+    p = order_details.Product([TEST_ID], TEST_PRODUCT_BUNDLE, TEST_ITEM_TYPE,
+                              fallback_bundle=TEST_FALLBACK_BUNDLE)
+    p_dict = p.to_dict()
+
+    expected = {
+        "item_ids": [TEST_ID],
+        "item_type": TEST_ITEM_TYPE,
+        "product_bundle": f'{TEST_PRODUCT_BUNDLE},{TEST_FALLBACK_BUNDLE}'
+    }
+
+    assert p_dict == expected
+
+
+def test_Notifications_from_dict():
+    test_details = {
+        'email': 'email',
+        'webhook_url': 'webhookurl',
+        'webhook_per_order': True
+    }
+
+    n = order_details.Notifications.from_dict(test_details)
+    assert n.email == 'email'
+    assert n.webhook_url == 'webhookurl'
+    assert n.webhook_per_order
+
+
+def test_Notifications_to_dict():
+    n = order_details.Notifications(email='email')
+    assert n.to_dict() == {'email': 'email'}
+
+    n = order_details.Notifications(webhook_url='webhookurl')
+    assert n.to_dict() == {'webhook_url': 'webhookurl'}
+
+    n = order_details.Notifications(webhook_per_order=True)
+    assert n.to_dict() == {'webhook_per_order': True}
+
+
+def test_Delivery():
+    d = order_details.Delivery(archive_type='Zip')
+    assert d.archive_type == 'zip'
+
+
+def test_Delivery_from_dict():
+    test_details = {
+        'archive_type': 'zip',
+        'single_archive': True,
+        'archive_filename': TEST_ARCHIVE_FILENAME
+    }
+
+    d = order_details.Delivery.from_dict(test_details)
+    assert d.archive_type == 'zip'
+    assert d.single_archive
+    assert d.archive_filename == TEST_ARCHIVE_FILENAME
+
+    test_details_cloud = {
+        'cloud': {'a': 'val'},
+        'archive_type': 'zip',
+        'single_archive': True,
+        'archive_filename': TEST_ARCHIVE_FILENAME
+    }
+
+    class TestDelivery(order_details.Delivery):
+        cloud_key = 'cloud'
+
+        def __init__(self, a, archive_type, single_archive, archive_filename):
+            self.a = a
+            super().__init__(archive_type, single_archive, archive_filename)
+
+    # does the dict get parsed correctly and do the values get sent to the
+    # constructor?
+    d2 = TestDelivery.from_dict(test_details_cloud)
+    assert d2.a == 'val'
+
+
+def test_Delivery_to_dict():
+    d = order_details.Delivery(archive_type='zip',
+                               single_archive=True,
+                               archive_filename=TEST_ARCHIVE_FILENAME)
+    details = d.to_dict()
+    expected = {
+        'archive_type': 'zip',
+        'single_archive': True,
+        'archive_filename': TEST_ARCHIVE_FILENAME
+    }
+    assert details == expected
+
+    d = order_details.Delivery(archive_type='zip')
+    details = d.to_dict()
+    expected = {
+        'archive_type': 'zip',
+    }
+    assert details == expected
+
+
+def test_AmazonS3Delivery_to_dict():
+    aws_access_key_id = 'keyid'
+    aws_secret_access_key = 'accesskey'
+    bucket = 'bucket'
+    aws_region = 'awsregion'
+    archive_type = 'zip'
+
+    d = order_details.AmazonS3Delivery(
+        aws_access_key_id,
+        aws_secret_access_key,
+        bucket,
+        aws_region,
+        archive_type=archive_type)
+    details = d.to_dict()
+    expected = {
+        'amazon_s3': {
+            'aws_access_key_id': aws_access_key_id,
+            'aws_secret_access_key': aws_secret_access_key,
+            'bucket': bucket,
+            'aws_region': aws_region
+        },
+        'archive_type': archive_type
+    }
+    assert details == expected
+
+
+def test_AzureBlobStorageDelivery_to_dict():
+    account = 'account'
+    container = 'container'
+    sas_token = 'sas_token'
+    archive_type = 'zip'
+
+    d = order_details.AzureBlobStorageDelivery(
+        account,
+        container,
+        sas_token,
+        archive_type=archive_type)
+    details = d.to_dict()
+    expected = {
+        'azure_blob_storage': {
+            'account': account,
+            'container': container,
+            'sas_token': sas_token,
+        },
+        'archive_type': archive_type
+    }
+    assert details == expected
+
+
+def test_GoogleCloudStorageDelivery_to_dict():
+    bucket = 'bucket'
+    credentials = 'credentials'
+    archive_type = 'zip'
+
+    d = order_details.GoogleCloudStorageDelivery(
+        bucket,
+        credentials,
+        archive_type=archive_type)
+    details = d.to_dict()
+    expected = {
+        'google_cloud_storage': {
+            'bucket': bucket,
+            'credentials': credentials,
+        },
+        'archive_type': archive_type
+    }
+    assert details == expected
+
+
+def test_GoogleEarthEngineDelivery_to_dict():
+    project = 'project'
+    collection = 'collection'
+    archive_type = 'zip'
+
+    d = order_details.GoogleEarthEngineDelivery(
+        project,
+        collection,
+        archive_type=archive_type)
+    details = d.to_dict()
+    expected = {
+        'google_earth_engine': {
+            'project': project,
+            'collection': collection,
+        },
+        'archive_type': archive_type
+    }
+    assert details == expected
+
+
+def test_Tool():
+    _ = order_details.Tool('band_math', 'jsonstring')
+
+    with pytest.raises(specs.SpecificationException):
+        _ = order_details.Tool('notsupported', 'jsonstring')
+
+
+def test_Tool_from_dict():
+    details = {
+        'band_math': {'b1': 'b1+b2'}
+    }
+    tool = order_details.Tool.from_dict(details)
+    assert tool.name == 'band_math'
+    assert tool.parameters == {'b1': 'b1+b2'}
+
+    with pytest.raises(order_details.ToolException):
+        _ = order_details.Tool.from_dict({'name': 'val', 'oops': 'error'})
+
+
+def test_Tool_to_dict():
+    tool = order_details.Tool('band_math', 'jsonstring')
+    assert tool.to_dict() == {'band_math': 'jsonstring'}
diff --git a/tests/unit/test_orders.py b/tests/unit/test_orders.py
deleted file mode 100644
index d0f69292a..000000000
--- a/tests/unit/test_orders.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# Copyright 2020 Planet Labs, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import logging - -import pytest - -from planet.api import orders - -LOGGER = logging.getLogger(__name__) - - -@pytest.fixture -def order_description(): - test_order = { - "_links": { - "_self": "selflocation", - "results": [ - { - "delivery": "success", - "expires_at": "2020-12-04T22:25:30.262Z", - "location": "location1", - "name": "name1" - }, - { - "delivery": "success", - "expires_at": "2020-12-04T22:25:30.264Z", - "location": "location2", - "name": "name2" - }, - { - "delivery": "success", - "expires_at": "2020-12-04T22:25:30.267Z", - "location": "location3", - "name": "name3" - } - ] - }, - "created_on": "2020-12-03T22:20:04.153Z", - "error_hints": [], - "id": "adca02a4-58eb-44b3-956f-b09aef7be02a", - "last_message": "Manifest delivery completed", - "last_modified": "2020-12-03T22:22:35.619Z", - "name": "test_order", - "products": [ - { - "item_ids": [ - "3949357_1454705_2020-12-01_241c" - ], - "item_type": "PSOrthoTile", - "product_bundle": "analytic" - } - ], - "state": "success" - } - return test_order - - -def test_Order_results(order_description): - order = orders.Order(order_description) - assert len(order.results) == 3 - - -def test_Order_locations(order_description): - order = orders.Order(order_description) - expected_locations = ['location1', 'location2', 'location3'] - assert order.locations == expected_locations - - -@pytest.fixture -def test_order_details_dict(): - test_order_details = { - "name": "string", - "subscription_id": 0, - "products": [ - { - "item_ids": [ - "string" - ], - "item_type": "psorthotile", - "product_bundle": "analytic" - } - ], - "delivery": { - "single_archive": True, - "archive_type": "string", - "archive_filename": "string", - "layout": { - "format": "standard" - }, - "amazon_s3": { - "bucket": "string", - "aws_region": "string", - "aws_access_key_id": "string", - "aws_secret_access_key": "string", - "path_prefix": "string" - }, - "azure_blob_storage": { - "account": "string", - "container": "string", - "sas_token": "string", - "storage_endpoint_suffix": "string", - "path_prefix": "string" - }, - "google_cloud_storage": { - "bucket": "string", - "credentials": "string", - "path_prefix": "string" - }, - "google_earth_engine": { - "project": "string", - "collection": "string" - } - }, - "notifications": { - "webhook": { - "url": "string", - "per_order": True - }, - "email": True - }, - "order_type": "full", - "tools": [ - { - "anchor_item": "string", - "method": "string", - "anchor_bundle": "string", - "strict": True - } - ] - } - return test_order_details - - -def test_OrderDetails_substitute_supported(): - key = 'abc' - supported = ['Supported', 'whyamihere'] - - # ensure capitalization doesn't matter - valid_product = {key: 'supported'} - orders.OrderDetails._substitute_supported(valid_product, key, supported) - assert valid_product[key] == 'Supported' - - # ensure an exception is raised when the entry is not supported - invalid_product = {key: 'notsupported'} - with pytest.raises(orders.OrderDetailsException): - orders.OrderDetails._substitute_supported( - invalid_product, key, supported) - - -def 
test_OrderDetails__validate_details_valid(test_order_details_dict):
-    test_order_details_dict['products'][0]['product_bundle'] = 'ANALYTIC'
-    test_order_details_dict['products'][0]['item_type'] = 'psorthotile'
-
-    _ = orders.OrderDetails(test_order_details_dict)
-
-
-def test_OrderDetails__validate_details_invalid(test_order_details_dict):
-    invalid_bundle = test_order_details_dict.copy()
-    invalid_bundle['products'][0]['product_bundle'] = 'nope'
-
-    with pytest.raises(orders.OrderDetailsException):
-        _ = orders.OrderDetails(invalid_bundle)
-
-    invalid_item = test_order_details_dict.copy()
-    invalid_item['products'][0]['product_bundle'] = 'ANALYTIC'
-    invalid_item['products'][0]['item_type'] = 'nope'
-
-    with pytest.raises(orders.OrderDetailsException):
-        _ = orders.OrderDetails(invalid_item)
diff --git a/tests/unit/test_orders_spec.py b/tests/unit/test_specs.py
similarity index 54%
rename from tests/unit/test_orders_spec.py
rename to tests/unit/test_specs.py
index da4b28001..5ffd9b7ac 100644
--- a/tests/unit/test_orders_spec.py
+++ b/tests/unit/test_specs.py
@@ -13,6 +13,8 @@
 # limitations under the License.
 import logging
 
+import pytest
+
 from planet import specs
 
 LOGGER = logging.getLogger(__name__)
@@ -31,6 +33,40 @@ def test_get_type_match():
     test_entry = 'locket'
     assert 'Locket' == specs.get_match(test_entry, spec_list)
 
+    with pytest.raises(specs.NoMatchException):
+        specs.get_match('a', ['b'])
+
+
+def test_validate_bundle():
+    assert 'analytic' == specs.validate_bundle('ANALYTIC')
+
+    with pytest.raises(specs.SpecificationException):
+        specs.validate_bundle('notsupported')
+
+
+def test_validate_item_type():
+    assert 'PSOrthoTile' == specs.validate_item_type('psorthotile', 'analytic')
+
+    with pytest.raises(specs.SpecificationException):
+        specs.validate_item_type('psorthotile', 'wha')
+
+    with pytest.raises(specs.SpecificationException):
+        specs.validate_item_type('notsupported', 'analytic')
+
+
+def test_validate_order_type():
+    assert 'full' == specs.validate_order_type('FULL')
+
+    with pytest.raises(specs.SpecificationException):
+        specs.validate_order_type('notsupported')
+
+
+def test_validate_archive_type():
+    assert 'zip' == specs.validate_archive_type('ZIP')
+
+    with pytest.raises(specs.SpecificationException):
+        specs.validate_archive_type('notsupported')
+
 
 
 def test_get_product_bundles():
     bundles = specs.get_product_bundles()
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
deleted file mode 100644
index 6f4c2659e..000000000
--- a/tests/unit/test_utils.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2015 Planet Labs, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import pytest -import re -from planet.api import utils - - -@pytest.mark.parametrize('headers,expected', [ - ({ - 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', - 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', - 'accept-ranges': 'bytes', - 'content-type': 'image/tiff', - 'content-length': '57350256', - 'content-disposition': 'attachment; filename="open_california.tif"' - }, 'open_california.tif'), - ({ - 'date': 'Thu, 14 Feb 2019 16:13:26 GMT', - 'last-modified': 'Wed, 22 Nov 2017 17:22:31 GMT', - 'accept-ranges': 'bytes', - 'content-type': 'image/tiff', - 'content-length': '57350256' - }, None), - ({}, None) -]) -def test_get_filename_from_headers(headers, expected): - assert utils.get_filename_from_headers(headers) == expected - - -@pytest.mark.parametrize('url,expected', [ - ('https://planet.com/', None), - ('https://planet.com/path/to/', None), - ('https://planet.com/path/to/example.tif', 'example.tif'), - ('https://planet.com/path/to/example.tif?foo=f6f1&bar=baz', 'example.tif'), - ('https://planet.com/path/to/example.tif?foo=f6f1#quux', 'example.tif'), -]) -def test_get_filename_from_url(url, expected): - assert utils.get_filename_from_url(url) == expected - - -@pytest.mark.parametrize('content_type,check', [ - (None, lambda x: re.match(r'^planet-[a-z0-9]{8}$', x, re.I) is not None), - ('image/tiff', lambda x: x.endswith(('.tif', '.tiff'))), -]) -def test_get_random_filename(content_type, check): - assert check(utils.get_random_filename(content_type))
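
To make the behavior pinned down by the new `tests/unit/test_specs.py` easier to see, here is a minimal usage sketch (not part of this diff). It assumes only what those tests assert: each validator returns the canonical spelling of a supported value and raises `specs.SpecificationException` for an unsupported one. The `normalized_order_params` helper is hypothetical and shown for illustration only.

```python
from planet import specs


def normalized_order_params(bundle, item_type, order_type, archive_type):
    # Hypothetical helper: normalize user-supplied values against the
    # Orders API specs. Each validator either returns the canonical
    # spelling or raises specs.SpecificationException.
    bundle = specs.validate_bundle(bundle)                     # 'ANALYTIC' -> 'analytic'
    item_type = specs.validate_item_type(item_type, bundle)    # 'psorthotile' -> 'PSOrthoTile'
    order_type = specs.validate_order_type(order_type)         # 'FULL' -> 'full'
    archive_type = specs.validate_archive_type(archive_type)   # 'ZIP' -> 'zip'
    return bundle, item_type, order_type, archive_type


print(normalized_order_params('ANALYTIC', 'psorthotile', 'FULL', 'ZIP'))
# expected, per the assertions above: ('analytic', 'PSOrthoTile', 'full', 'zip')
```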