From 7bfe7fbaac144b8e63e2b20657026ee1c86e245d Mon Sep 17 00:00:00 2001 From: Sylvain Brunato <61419125+sbrunato@users.noreply.github.com> Date: Fri, 17 Jan 2025 15:25:54 +0100 Subject: [PATCH] refactor: aws auth typing and generic types (#1486) --- docs/conf.py | 6 +- eodag/api/core.py | 86 +++++------ eodag/api/product/_assets.py | 10 +- eodag/api/product/_product.py | 14 +- eodag/api/product/metadata_mapping.py | 124 ++++++++-------- eodag/api/search_result.py | 36 ++--- eodag/cli.py | 8 +- eodag/config.py | 135 +++++++++--------- eodag/plugins/apis/base.py | 2 +- eodag/plugins/apis/ecmwf.py | 19 +-- eodag/plugins/apis/usgs.py | 21 +-- eodag/plugins/authentication/aws_auth.py | 29 ++-- eodag/plugins/authentication/base.py | 8 +- eodag/plugins/authentication/header.py | 6 +- eodag/plugins/authentication/keycloak.py | 8 +- eodag/plugins/authentication/oauth.py | 10 +- .../plugins/authentication/openid_connect.py | 28 ++-- eodag/plugins/authentication/sas_auth.py | 8 +- eodag/plugins/authentication/token.py | 14 +- .../plugins/authentication/token_exchange.py | 2 +- eodag/plugins/base.py | 8 +- eodag/plugins/crunch/base.py | 8 +- eodag/plugins/crunch/filter_date.py | 8 +- .../plugins/crunch/filter_latest_intersect.py | 12 +- .../plugins/crunch/filter_latest_tpl_name.py | 14 +- eodag/plugins/crunch/filter_overlap.py | 8 +- eodag/plugins/crunch/filter_property.py | 8 +- eodag/plugins/download/aws.py | 90 +++++------- eodag/plugins/download/base.py | 25 ++-- eodag/plugins/download/creodias_s3.py | 4 +- eodag/plugins/download/http.py | 53 +++---- eodag/plugins/download/s3rest.py | 9 +- eodag/plugins/manager.py | 30 ++-- eodag/plugins/search/__init__.py | 11 +- eodag/plugins/search/base.py | 53 +++---- eodag/plugins/search/build_search_result.py | 101 ++++++------- eodag/plugins/search/cop_marine.py | 22 +-- eodag/plugins/search/creodias_s3.py | 4 +- eodag/plugins/search/csw.py | 22 +-- eodag/plugins/search/data_request_search.py | 28 ++-- eodag/plugins/search/qssearch.py | 92 ++++++------ eodag/plugins/search/static_stac_search.py | 6 +- eodag/rest/cache.py | 4 +- eodag/rest/config.py | 6 +- eodag/rest/core.py | 48 +++---- eodag/rest/errors.py | 10 +- eodag/rest/server.py | 14 +- eodag/rest/stac.py | 76 +++++----- eodag/rest/types/collections_search.py | 6 +- eodag/rest/types/eodag_search.py | 46 +++--- eodag/rest/types/queryables.py | 26 ++-- eodag/rest/types/stac_search.py | 40 ++---- eodag/rest/utils/__init__.py | 32 ++--- eodag/rest/utils/cql_evaluate.py | 12 +- eodag/rest/utils/rfc3339.py | 4 +- eodag/types/__init__.py | 42 +++--- eodag/types/bbox.py | 4 +- eodag/types/download_args.py | 4 +- eodag/types/search_args.py | 8 +- eodag/types/whoosh.py | 4 +- eodag/utils/__init__.py | 72 +++++----- eodag/utils/exceptions.py | 4 +- eodag/utils/import_system.py | 4 +- eodag/utils/requests.py | 4 +- eodag/utils/rest.py | 4 +- eodag/utils/stac_reader.py | 20 +-- tests/integration/test_core_search_results.py | 2 +- tests/test_requirements.py | 12 +- tests/units/test_download_plugins.py | 6 +- tests/units/test_http_server.py | 6 +- tests/units/test_stac_utils.py | 4 +- tests/utils.py | 4 +- utils/params_mapping_to_csv.py | 5 +- utils/product_types_information_to_csv.py | 6 +- 74 files changed, 812 insertions(+), 917 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index c8a6e9537..dd980df07 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -23,7 +23,7 @@ import re from datetime import datetime from importlib.metadata import metadata -from typing import Any, Dict, List +from typing import Any # -- 
General configuration ------------------------------------------------ @@ -168,7 +168,7 @@ "custom.css", ] -html_js_files: List[Any] = [] +html_js_files: list[Any] = [] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -184,7 +184,7 @@ # -- Options for LaTeX output --------------------------------------------- -latex_elements: Dict[str, str] = { +latex_elements: dict[str, str] = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', diff --git a/eodag/api/core.py b/eodag/api/core.py index ea06c040f..871b11596 100644 --- a/eodag/api/core.py +++ b/eodag/api/core.py @@ -24,7 +24,7 @@ import shutil import tempfile from operator import itemgetter -from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Iterator, Optional, Union import geojson import pkg_resources @@ -185,7 +185,7 @@ def __init__( self._plugins_manager.rebuild(self.providers_config) # store pruned providers configs - self._pruned_providers_config: Dict[str, Any] = {} + self._pruned_providers_config: dict[str, Any] = {} # filter out providers needing auth that have no credentials set self._prune_providers_list() @@ -335,7 +335,7 @@ def set_preferred_provider(self, provider: str) -> None: new_priority = max_priority + 1 self._plugins_manager.set_priority(provider, new_priority) - def get_preferred_provider(self) -> Tuple[str, int]: + def get_preferred_provider(self) -> tuple[str, int]: """Get the provider currently set as the preferred one for searching products, along with its priority. @@ -351,7 +351,7 @@ def get_preferred_provider(self) -> Tuple[str, int]: def update_providers_config( self, yaml_conf: Optional[str] = None, - dict_conf: Optional[Dict[str, Any]] = None, + dict_conf: Optional[dict[str, Any]] = None, ) -> None: """Update providers configuration with given input. Can be used to add a provider to existing configuration or update @@ -397,12 +397,12 @@ def add_provider( name: str, url: Optional[str] = None, priority: Optional[int] = None, - search: Dict[str, Any] = {"type": "StacSearch"}, - products: Dict[str, Any] = { + search: dict[str, Any] = {"type": "StacSearch"}, + products: dict[str, Any] = { GENERIC_PRODUCT_TYPE: {"productType": "{productType}"} }, - download: Dict[str, Any] = {"type": "HTTPDownload", "auth_error_code": 401}, - **kwargs: Dict[str, Any], + download: dict[str, Any] = {"type": "HTTPDownload", "auth_error_code": 401}, + **kwargs: dict[str, Any], ): """Adds a new provider. 
@@ -421,7 +421,7 @@ def add_provider( :param download: Download :class:`~eodag.config.PluginConfig` mapping :param kwargs: Additional :class:`~eodag.config.ProviderConfig` mapping """ - conf_dict: Dict[str, Any] = { + conf_dict: dict[str, Any] = { name: { "url": url, "search": {"type": "StacSearch", **search}, @@ -565,7 +565,7 @@ def set_locations_conf(self, locations_conf_path: str) -> None: main_locations_config = locations_config[main_key] logger.info("Locations configuration loaded from %s" % locations_conf_path) - self.locations_config: List[Dict[str, Any]] = main_locations_config + self.locations_config: list[dict[str, Any]] = main_locations_config else: logger.info( "Could not load locations configuration from %s" % locations_conf_path @@ -574,7 +574,7 @@ def set_locations_conf(self, locations_conf_path: str) -> None: def list_product_types( self, provider: Optional[str] = None, fetch_providers: bool = True - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: """Lists supported product types. :param provider: (optional) The name of a provider that must support the product @@ -588,7 +588,7 @@ def list_product_types( # First, update product types list if possible self.fetch_product_types_list(provider=provider) - product_types: List[Dict[str, Any]] = [] + product_types: list[dict[str, Any]] = [] providers_configs = ( list(self.providers_config.values()) @@ -644,7 +644,7 @@ def fetch_product_types_list(self, provider: Optional[str] = None) -> None: providers_to_fetch = [provider] # providers discovery confs that are fetchable - providers_discovery_configs_fetchable: Dict[str, Any] = {} + providers_discovery_configs_fetchable: dict[str, Any] = {} # check if any provider has not already been fetched for product types already_fetched = True for provider_to_fetch in providers_to_fetch: @@ -767,7 +767,7 @@ def fetch_product_types_list(self, provider: Optional[str] = None) -> None: def discover_product_types( self, provider: Optional[str] = None - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: """Fetch providers for product types :param provider: The name of a provider or provider-group to fetch. 
Defaults to @@ -787,7 +787,7 @@ def discover_product_types( raise UnsupportedProvider( f"The requested provider is not (yet) supported: {provider}" ) - ext_product_types_conf: Dict[str, Any] = {} + ext_product_types_conf: dict[str, Any] = {} providers_to_fetch = [ p for p in ( @@ -800,7 +800,7 @@ def discover_product_types( else self.available_providers() ) ] - kwargs: Dict[str, Any] = {} + kwargs: dict[str, Any] = {} for provider in providers_to_fetch: if hasattr(self.providers_config[provider], "search"): search_plugin_config = self.providers_config[provider].search @@ -841,7 +841,7 @@ def discover_product_types( return sort_dict(ext_product_types_conf) def update_product_types_list( - self, ext_product_types_conf: Dict[str, Optional[Dict[str, Dict[str, Any]]]] + self, ext_product_types_conf: dict[str, Optional[dict[str, dict[str, Any]]]] ) -> None: """Update eodag product types list @@ -869,7 +869,7 @@ def update_product_types_list( provider, ) continue - new_product_types: List[str] = [] + new_product_types: list[str] = [] for ( new_product_type, new_product_type_conf, @@ -932,7 +932,7 @@ def update_product_types_list( def available_providers( self, product_type: Optional[str] = None, by_group: bool = False - ) -> List[str]: + ) -> list[str]: """Gives the sorted list of the available providers or groups The providers or groups are sorted first by their priority level in descending order, @@ -959,7 +959,7 @@ def available_providers( # If by_group is True, keep only the highest priority for each group if by_group: - group_priority: Dict[str, int] = {} + group_priority: dict[str, int] = {} for name, priority in providers: if name not in group_priority or priority > group_priority[name]: group_priority[name] = priority @@ -1026,7 +1026,7 @@ def guess_product_type( missionStartDate: Optional[str] = None, missionEndDate: Optional[str] = None, **kwargs: Any, - ) -> List[str]: + ) -> list[str]: """ Find EODAG product type IDs that best match a set of search parameters. @@ -1084,7 +1084,7 @@ def guess_product_type( query = p.parse(text) results = searcher.search(query, limit=None) - guesses: List[Dict[str, str]] = [dict(r) for r in results or []] + guesses: list[dict[str, str]] = [dict(r) for r in results or []] # datetime filtering if missionStartDate or missionEndDate: @@ -1125,8 +1125,8 @@ def search( raise_errors: bool = False, start: Optional[str] = None, end: Optional[str] = None, - geom: Optional[Union[str, Dict[str, float], BaseGeometry]] = None, - locations: Optional[Dict[str, str]] = None, + geom: Optional[Union[str, dict[str, float], BaseGeometry]] = None, + locations: Optional[dict[str, str]] = None, provider: Optional[str] = None, count: bool = False, **kwargs: Any, @@ -1205,7 +1205,7 @@ def search( items_per_page=items_per_page, ) - errors: List[Tuple[str, Exception]] = [] + errors: list[tuple[str, Exception]] = [] # Loop over available providers and return the first non-empty results for i, search_plugin in enumerate(search_plugins): search_plugin.clear() @@ -1234,8 +1234,8 @@ def search_iter_page( items_per_page: int = DEFAULT_ITEMS_PER_PAGE, start: Optional[str] = None, end: Optional[str] = None, - geom: Optional[Union[str, Dict[str, float], BaseGeometry]] = None, - locations: Optional[Dict[str, str]] = None, + geom: Optional[Union[str, dict[str, float], BaseGeometry]] = None, + locations: Optional[dict[str, str]] = None, **kwargs: Any, ) -> Iterator[SearchResult]: """Iterate over the pages of a products search. 
@@ -1411,8 +1411,8 @@ def search_all( items_per_page: Optional[int] = None, start: Optional[str] = None, end: Optional[str] = None, - geom: Optional[Union[str, Dict[str, float], BaseGeometry]] = None, - locations: Optional[Dict[str, str]] = None, + geom: Optional[Union[str, dict[str, float], BaseGeometry]] = None, + locations: Optional[dict[str, str]] = None, **kwargs: Any, ) -> SearchResult: """Search and return all the products matching the search criteria. @@ -1633,7 +1633,7 @@ def _fetch_external_product_type(self, provider: str, product_type: str): if not getattr(plugin.config, "discover_product_types", {}).get("fetch_url"): return None - kwargs: Dict[str, Any] = {"productType": product_type} + kwargs: dict[str, Any] = {"productType": product_type} # append auth if needed if getattr(plugin.config, "need_auth", False): @@ -1651,11 +1651,11 @@ def _prepare_search( self, start: Optional[str] = None, end: Optional[str] = None, - geom: Optional[Union[str, Dict[str, float], BaseGeometry]] = None, - locations: Optional[Dict[str, str]] = None, + geom: Optional[Union[str, dict[str, float], BaseGeometry]] = None, + locations: Optional[dict[str, str]] = None, provider: Optional[str] = None, **kwargs: Any, - ) -> Tuple[List[Union[Search, Api]], Dict[str, Any]]: + ) -> tuple[list[Union[Search, Api]], dict[str, Any]]: """Internal method to prepare the search kwargs and get the search plugins. Product query: @@ -1763,7 +1763,7 @@ def _prepare_search( preferred_provider = self.get_preferred_provider()[0] - search_plugins: List[Union[Search, Api]] = [] + search_plugins: list[Union[Search, Api]] = [] for plugin in self._plugins_manager.get_search_plugins( product_type=product_type, provider=provider ): @@ -1833,10 +1833,10 @@ def _do_search( max_items_per_page, ) - results: List[EOProduct] = [] + results: list[EOProduct] = [] total_results: Optional[int] = 0 if count else None - errors: List[Tuple[str, Exception]] = [] + errors: list[tuple[str, Exception]] = [] try: prep = PreparedSearch(count=count) @@ -1984,7 +1984,7 @@ def crunch(self, results: SearchResult, **kwargs: Any) -> SearchResult: return results @staticmethod - def group_by_extent(searches: List[SearchResult]) -> List[SearchResult]: + def group_by_extent(searches: list[SearchResult]) -> list[SearchResult]: """Combines multiple SearchResults and return a list of SearchResults grouped by extent (i.e. bounding box). @@ -1993,7 +1993,7 @@ def group_by_extent(searches: List[SearchResult]) -> List[SearchResult]: """ # Dict with extents as keys, each extent being defined by a str # "{minx}{miny}{maxx}{maxy}" (each float rounded to 2 dec). - products_grouped_by_extent: Dict[str, Any] = {} + products_grouped_by_extent: dict[str, Any] = {} for search in searches: for product in search: @@ -2015,7 +2015,7 @@ def download_all( wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, **kwargs: Unpack[DownloadConf], - ) -> List[str]: + ) -> list[str]: """Download all products resulting from a search. 
:param search_result: A collection of EO products resulting from a search @@ -2273,7 +2273,7 @@ def list_queryables( properties, associating parameters to their annotated type, and a additional_properties attribute """ # only fetch providers if product type is not found - available_product_types: List[str] = [ + available_product_types: list[str] = [ pt["ID"] for pt in self.list_product_types(provider=provider, fetch_providers=False) ] @@ -2310,7 +2310,7 @@ def list_queryables( for plugin in self._plugins_manager.get_search_plugins(product_type, provider): # attach product type config - product_type_configs: Dict[str, Any] = {} + product_type_configs: dict[str, Any] = {} if product_type: self._attach_product_type_config(plugin, product_type) product_type_configs[product_type] = plugin.config.product_type_config @@ -2354,7 +2354,7 @@ def list_queryables( **queryable_properties, ) - def available_sortables(self) -> Dict[str, Optional[ProviderSortables]]: + def available_sortables(self) -> dict[str, Optional[ProviderSortables]]: """For each provider, gives its available sortable parameter(s) and its maximum number of them if it supports the sorting feature, otherwise gives None. @@ -2362,7 +2362,7 @@ def available_sortables(self) -> Dict[str, Optional[ProviderSortables]]: its (their) maximum number as value(s). :raises: :class:`~eodag.utils.exceptions.UnsupportedProvider` """ - sortables: Dict[str, Optional[ProviderSortables]] = {} + sortables: dict[str, Optional[ProviderSortables]] = {} provider_search_plugins = self._plugins_manager.get_search_plugins() for provider_search_plugin in provider_search_plugins: provider = provider_search_plugin.provider diff --git a/eodag/api/product/_assets.py b/eodag/api/product/_assets.py index 5068eb641..60f5d5191 100644 --- a/eodag/api/product/_assets.py +++ b/eodag/api/product/_assets.py @@ -19,7 +19,7 @@ import re from collections import UserDict -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Optional from eodag.utils.exceptions import NotAvailableError from eodag.utils.repr import dict_to_html_table @@ -45,10 +45,10 @@ def __init__(self, product: EOProduct, *args: Any, **kwargs: Any) -> None: self.product = product super(AssetsDict, self).__init__(*args, **kwargs) - def __setitem__(self, key: str, value: Dict[str, Any]) -> None: + def __setitem__(self, key: str, value: dict[str, Any]) -> None: super().__setitem__(key, Asset(self.product, key, value)) - def as_dict(self) -> Dict[str, Any]: + def as_dict(self) -> dict[str, Any]: """Builds a representation of AssetsDict to enable its serialization :returns: The representation of a :class:`~eodag.api.product._assets.AssetsDict` @@ -56,7 +56,7 @@ def as_dict(self) -> Dict[str, Any]: """ return {k: v.as_dict() for k, v in self.data.items()} - def get_values(self, asset_filter: str = "") -> List[Asset]: + def get_values(self, asset_filter: str = "") -> list[Asset]: """ retrieves the assets matching the given filter @@ -138,7 +138,7 @@ def __init__(self, product: EOProduct, key: str, *args: Any, **kwargs: Any) -> N self.key = key super(Asset, self).__init__(*args, **kwargs) - def as_dict(self) -> Dict[str, Any]: + def as_dict(self) -> dict[str, Any]: """Builds a representation of Asset to enable its serialization :returns: The representation of a :class:`~eodag.api.product._assets.Asset` as a diff --git a/eodag/api/product/_product.py b/eodag/api/product/_product.py index f7bd1c759..a6577d330 100644 --- a/eodag/api/product/_product.py +++ 
b/eodag/api/product/_product.py @@ -22,7 +22,7 @@ import os import re import tempfile -from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Optional, Union import requests from requests import RequestException @@ -113,7 +113,7 @@ class EOProduct: """ provider: str - properties: Dict[str, Any] + properties: dict[str, Any] product_type: Optional[str] location: str filename: str @@ -124,7 +124,7 @@ class EOProduct: assets: AssetsDict def __init__( - self, provider: str, properties: Dict[str, Any], **kwargs: Any + self, provider: str, properties: dict[str, Any], **kwargs: Any ) -> None: self.provider = provider self.product_type = kwargs.get("productType") @@ -175,7 +175,7 @@ def __init__( self.downloader: Optional[Union[Api, Download]] = None self.downloader_auth: Optional[Authentication] = None - def as_dict(self) -> Dict[str, Any]: + def as_dict(self) -> dict[str, Any]: """Builds a representation of EOProduct as a dictionary to enable its geojson serialization @@ -186,7 +186,7 @@ def as_dict(self) -> Dict[str, Any]: if self.search_intersection is not None: search_intersection = geometry.mapping(self.search_intersection) - geojson_repr: Dict[str, Any] = { + geojson_repr: dict[str, Any] = { "type": "Feature", "geometry": geometry.mapping(self.geometry), "id": self.properties["id"], @@ -206,7 +206,7 @@ def as_dict(self) -> Dict[str, Any]: return geojson_repr @classmethod - def from_geojson(cls, feature: Dict[str, Any]) -> EOProduct: + def from_geojson(cls, feature: dict[str, Any]) -> EOProduct: """Builds an :class:`~eodag.api.product._product.EOProduct` object from its representation as geojson @@ -356,7 +356,7 @@ def download( def _init_progress_bar( self, progress_callback: Optional[ProgressCallback] - ) -> Tuple[ProgressCallback, bool]: + ) -> tuple[ProgressCallback, bool]: # progress bar init if progress_callback is None: progress_callback = ProgressCallback(position=1) diff --git a/eodag/api/product/metadata_mapping.py b/eodag/api/product/metadata_mapping.py index 9ba1f3485..2d25c0fb4 100644 --- a/eodag/api/product/metadata_mapping.py +++ b/eodag/api/product/metadata_mapping.py @@ -23,19 +23,7 @@ import re from datetime import datetime, timedelta from string import Formatter -from typing import ( - TYPE_CHECKING, - Any, - AnyStr, - Callable, - Dict, - Iterator, - List, - Optional, - Tuple, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, AnyStr, Callable, Iterator, Optional, Union, cast import geojson import orjson @@ -88,8 +76,8 @@ def get_metadata_path( - map_value: Union[str, List[str]], -) -> Tuple[Union[List[str], None], str]: + map_value: Union[str, list[str]], +) -> tuple[Union[list[str], None], str]: """Return the jsonpath or xpath to the value of a EO product metadata in a provider search result. 
@@ -137,12 +125,12 @@ def get_metadata_path( return None, path -def get_metadata_path_value(map_value: Union[str, List[str]]) -> str: +def get_metadata_path_value(map_value: Union[str, list[str]]) -> str: """Get raw metadata path without converter""" return map_value[1] if isinstance(map_value, list) else map_value -def get_search_param(map_value: List[str]) -> str: +def get_search_param(map_value: list[str]) -> str: """See :func:`~eodag.api.product.metadata_mapping.get_metadata_path` :param map_value: The value originating from the definition of `metadata_mapping` @@ -335,7 +323,7 @@ def convert_to_rounded_wkt(value: BaseGeometry) -> str: return wkt_value @staticmethod - def convert_to_bounds_lists(input_geom: BaseGeometry) -> List[List[float]]: + def convert_to_bounds_lists(input_geom: BaseGeometry) -> list[list[float]]: if isinstance(input_geom, MultiPolygon): geoms = [geom for geom in input_geom.geoms] # sort with larger one at first (stac-browser only plots first one) @@ -345,7 +333,7 @@ def convert_to_bounds_lists(input_geom: BaseGeometry) -> List[List[float]]: return [list(input_geom.bounds[0:4])] @staticmethod - def convert_to_bounds(input_geom_unformatted: Any) -> List[float]: + def convert_to_bounds(input_geom_unformatted: Any) -> list[float]: input_geom = get_geometry_from_various(geometry=input_geom_unformatted) if isinstance(input_geom, MultiPolygon): geoms = [geom for geom in input_geom.geoms] @@ -365,7 +353,7 @@ def convert_to_bounds(input_geom_unformatted: Any) -> List[float]: return list(input_geom.bounds[0:4]) @staticmethod - def convert_to_nwse_bounds(input_geom: BaseGeometry) -> List[float]: + def convert_to_nwse_bounds(input_geom: BaseGeometry) -> list[float]: if isinstance(input_geom, str): input_geom = shapely.wkt.loads(input_geom) return list(input_geom.bounds[-1:] + input_geom.bounds[:-1]) @@ -449,7 +437,7 @@ def flatten_elements(nested) -> Iterator[Any]: else: yield e - polygons_list: List[Polygon] = [] + polygons_list: list[Polygon] = [] for elem in flatten_elements(georss[0]): coords_list = elem.text.split() polygon_args = [ @@ -474,7 +462,7 @@ def flatten_elements(nested) -> Iterator[Any]: @staticmethod def convert_to_longitude_latitude( input_geom_unformatted: Any, - ) -> Dict[str, float]: + ) -> dict[str, float]: bounds = MetadataFormatter.convert_to_bounds(input_geom_unformatted) lon = (bounds[0] + bounds[2]) / 2 lat = (bounds[1] + bounds[3]) / 2 @@ -514,8 +502,8 @@ def convert_replace_str(string: str, args: str) -> str: @staticmethod def convert_recursive_sub_str( - input_obj: Union[Dict[Any, Any], List[Any]], args: str - ) -> Union[Dict[Any, Any], List[Any]]: + input_obj: Union[dict[Any, Any], list[Any]], args: str + ) -> Union[dict[Any, Any], list[Any]]: old, new = ast.literal_eval(args) return items_recursive_apply( input_obj, @@ -525,8 +513,8 @@ def convert_recursive_sub_str( @staticmethod def convert_dict_update( - input_dict: Dict[Any, Any], args: str - ) -> Dict[Any, Any]: + input_dict: dict[Any, Any], args: str + ) -> dict[Any, Any]: """Converts""" new_items_list = ast.literal_eval(args) @@ -536,8 +524,8 @@ def convert_dict_update( @staticmethod def convert_dict_filter( - input_dict: Dict[Any, Any], jsonpath_filter_str: str - ) -> Dict[Any, Any]: + input_dict: dict[Any, Any], jsonpath_filter_str: str + ) -> dict[Any, Any]: """Fitlers dict items using jsonpath""" jsonpath_filter = string_to_jsonpath(jsonpath_filter_str, force=True) @@ -616,8 +604,8 @@ def convert_s2msil2a_title_to_aws_productinfo(string: str) -> str: return NOT_AVAILABLE 
@staticmethod - def convert_split_id_into_s1_params(product_id: str) -> Dict[str, str]: - parts: List[str] = re.split(r"_(?!_)", product_id) + def convert_split_id_into_s1_params(product_id: str) -> dict[str, str]: + parts: list[str] = re.split(r"_(?!_)", product_id) if len(parts) < 9: logger.error( "id %s does not match expected Sentinel-1 id format", product_id @@ -651,8 +639,8 @@ def convert_split_id_into_s1_params(product_id: str) -> Dict[str, str]: return params @staticmethod - def convert_split_id_into_s3_params(product_id: str) -> Dict[str, str]: - parts: List[str] = re.split(r"_(?!_)", product_id) + def convert_split_id_into_s3_params(product_id: str) -> dict[str, str]: + parts: list[str] = re.split(r"_(?!_)", product_id) params = {"productType": product_id[4:15]} dates = re.findall("[0-9]{8}T[0-9]{6}", product_id) start_date = datetime.strptime(dates[0], "%Y%m%dT%H%M%S") - timedelta( @@ -668,8 +656,8 @@ def convert_split_id_into_s3_params(product_id: str) -> Dict[str, str]: return params @staticmethod - def convert_split_id_into_s5p_params(product_id: str) -> Dict[str, str]: - parts: List[str] = re.split(r"_(?!_)", product_id) + def convert_split_id_into_s5p_params(product_id: str) -> dict[str, str]: + parts: list[str] = re.split(r"_(?!_)", product_id) params = { "productType": product_id[9:19], "processingMode": parts[1], @@ -686,7 +674,7 @@ def convert_split_id_into_s5p_params(product_id: str) -> Dict[str, str]: return params @staticmethod - def convert_split_cop_dem_id(product_id: str) -> List[int]: + def convert_split_cop_dem_id(product_id: str) -> list[int]: parts = product_id.split("_") lattitude = parts[3] longitude = parts[5] @@ -725,7 +713,7 @@ def convert_dates_from_cmems_id(product_id: str): @staticmethod def convert_to_datetime_dict( date: str, format: str - ) -> Dict[str, Union[List[str], str]]: + ) -> dict[str, Union[list[str], str]]: """Convert a date (str) to a dictionary where values are in the format given in argument date == "2021-04-21T18:27:19.123Z" and format == "list" => { @@ -777,7 +765,7 @@ def convert_to_datetime_dict( @staticmethod def convert_interval_to_datetime_dict( date: str, separator: str = "/" - ) -> Dict[str, List[str]]: + ) -> dict[str, list[str]]: """Convert a date interval ('/' separated str) to a dictionary where values are lists date == "2021-04-21/2021-04-22" => { @@ -817,7 +805,7 @@ def convert_interval_to_datetime_dict( } @staticmethod - def convert_get_ecmwf_time(date: str) -> List[str]: + def convert_get_ecmwf_time(date: str) -> list[str]: """Get the time of a date (str) in the ECMWF format (["HH:00"]) "2021-04-21T18:27:19.123Z" => ["18:00"] @@ -861,8 +849,8 @@ def convert_get_variables_from_path(path: str): @staticmethod def convert_assets_list_to_dict( - assets_list: List[Dict[str, str]], asset_name_key: str = "title" - ) -> Dict[str, Dict[str, str]]: + assets_list: list[dict[str, str]], asset_name_key: str = "title" + ) -> dict[str, dict[str, str]]: """Convert a list of assets to a dictionary where keys represent name of assets and are found among values of asset dictionaries. 
@@ -889,8 +877,8 @@ def convert_assets_list_to_dict( "asset3": {"href": "qux", "title": "qux-title", "name": "asset3"}, } """ - asset_names: List[str] = [] - assets_dict: Dict[str, Dict[str, str]] = {} + asset_names: list[str] = [] + assets_dict: dict[str, dict[str, str]] = {} for asset in assets_list: asset_name = asset[asset_name_key] @@ -899,7 +887,7 @@ def convert_assets_list_to_dict( # we only keep the equivalent of the path basename in the case where the # asset name has a path pattern and this basename is only found once - immutable_asset_indexes: List[int] = [] + immutable_asset_indexes: list[int] = [] for i, asset_name in enumerate(asset_names): if i in immutable_asset_indexes: continue @@ -925,10 +913,10 @@ def convert_assets_list_to_dict( def properties_from_json( - json: Dict[str, Any], - mapping: Dict[str, Any], - discovery_config: Optional[Dict[str, Any]] = None, -) -> Dict[str, Any]: + json: dict[str, Any], + mapping: dict[str, Any], + discovery_config: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: """Extract properties from a provider json result. :param json: The representation of a provider result as a json object @@ -941,7 +929,7 @@ def properties_from_json( `discovery_path` (String representation of jsonpath) :returns: The metadata of the :class:`~eodag.api.product._product.EOProduct` """ - properties: Dict[str, Any] = {} + properties: dict[str, Any] = {} templates = {} used_jsonpaths = [] for metadata, value in mapping.items(): @@ -1085,8 +1073,8 @@ def properties_from_xml( xml_as_text: AnyStr, mapping: Any, empty_ns_prefix: str = "ns", - discovery_config: Optional[Dict[str, Any]] = None, -) -> Dict[str, Any]: + discovery_config: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: """Extract properties from a provider xml result. 
:param xml_as_text: The representation of a provider result as xml @@ -1104,7 +1092,7 @@ def properties_from_xml( `discovery_path` (String representation of xpath) :returns: the metadata of the :class:`~eodag.api.product._product.EOProduct` """ - properties: Dict[str, Any] = {} + properties: dict[str, Any] = {} templates = {} used_xpaths = [] root = etree.XML(xml_as_text) @@ -1232,10 +1220,10 @@ def properties_from_xml( def mtd_cfg_as_conversion_and_querypath( - src_dict: Dict[str, Any], - dest_dict: Dict[str, Any] = {}, + src_dict: dict[str, Any], + dest_dict: dict[str, Any] = {}, result_type: str = "json", -) -> Dict[str, Any]: +) -> dict[str, Any]: """Metadata configuration dictionary to querypath with conversion dictionary Transform every src_dict value from jsonpath_str to tuple `(conversion, jsonpath_object)` or from xpath_str to tuple `(conversion, xpath_str)` @@ -1283,8 +1271,8 @@ def mtd_cfg_as_conversion_and_querypath( def format_query_params( - product_type: str, config: PluginConfig, query_dict: Dict[str, Any] -) -> Dict[str, Any]: + product_type: str, config: PluginConfig, query_dict: dict[str, Any] +) -> dict[str, Any]: """format the search parameters to query parameters""" if "raise_errors" in query_dict.keys(): del query_dict["raise_errors"] @@ -1296,7 +1284,7 @@ def format_query_params( **config.products.get(product_type, {}).get("metadata_mapping", {}), ) - query_params: Dict[str, Any] = {} + query_params: dict[str, Any] = {} # Get all the search parameters that are recognised as queryables by the # provider (they appear in the queryables dictionary) queryables = _get_queryables(query_dict, config, product_type_metadata_mapping) @@ -1386,10 +1374,10 @@ def _resolve_hashes(formatted_query_param: str) -> str: def _format_free_text_search( - config: PluginConfig, metadata_mapping: Dict[str, Any], **kwargs: Any -) -> Dict[str, Any]: + config: PluginConfig, metadata_mapping: dict[str, Any], **kwargs: Any +) -> dict[str, Any]: """Build the free text search parameter using the search parameters""" - query_params: Dict[str, Any] = {} + query_params: dict[str, Any] = {} if not getattr(config, "free_text_search_operations", None): return query_params for param, operations_config in config.free_text_search_operations.items(): @@ -1428,13 +1416,13 @@ def _format_free_text_search( def _get_queryables( - search_params: Dict[str, Any], + search_params: dict[str, Any], config: PluginConfig, - metadata_mapping: Dict[str, Any], -) -> Dict[str, Any]: + metadata_mapping: dict[str, Any], +) -> dict[str, Any]: """Retrieve the metadata mappings that are query-able""" logger.debug("Retrieving queryable metadata from metadata_mapping") - queryables: Dict[str, Any] = {} + queryables: dict[str, Any] = {} for eodag_search_key, user_input in search_params.items(): if user_input is not None: md_mapping = metadata_mapping.get(eodag_search_key, (None, NOT_MAPPED)) @@ -1481,7 +1469,7 @@ def _get_queryables( def get_queryable_from_provider( - provider_queryable: str, metadata_mapping: Dict[str, Union[str, List[str]]] + provider_queryable: str, metadata_mapping: dict[str, Union[str, list[str]]] ) -> Optional[str]: """Get EODAG configured queryable parameter from provider queryable parameter @@ -1505,7 +1493,7 @@ def get_queryable_from_provider( def get_provider_queryable_path( - queryable: str, metadata_mapping: Dict[str, Union[str, List[str]]] + queryable: str, metadata_mapping: dict[str, Union[str, list[str]]] ) -> Optional[str]: """Get EODAG configured queryable path from its parameter @@ -1522,8 
+1510,8 @@ def get_provider_queryable_path( def get_provider_queryable_key( eodag_key: str, - provider_queryables: Dict[str, Any], - metadata_mapping: Dict[str, Union[List[Any], str]], + provider_queryables: dict[str, Any], + metadata_mapping: dict[str, Union[list[Any], str]], ) -> str: """Finds the provider queryable corresponding to the given eodag key based on the metadata mapping diff --git a/eodag/api/search_result.py b/eodag/api/search_result.py index d516ec468..b6b6519e3 100644 --- a/eodag/api/search_result.py +++ b/eodag/api/search_result.py @@ -18,17 +18,7 @@ from __future__ import annotations from collections import UserList -from typing import ( - TYPE_CHECKING, - Annotated, - Any, - Dict, - Iterable, - List, - Optional, - Tuple, - Union, -) +from typing import TYPE_CHECKING, Annotated, Any, Iterable, Optional, Union from shapely.geometry import GeometryCollection, shape from typing_extensions import Doc @@ -56,17 +46,17 @@ class SearchResult(UserList): :ivar number_matched: Estimated total number of matching results """ - data: List[EOProduct] + data: list[EOProduct] errors: Annotated[ - List[Tuple[str, Exception]], Doc("Tuple of provider name, exception") + list[tuple[str, Exception]], Doc("Tuple of provider name, exception") ] def __init__( self, - products: List[EOProduct], + products: list[EOProduct], number_matched: Optional[int] = None, - errors: List[Tuple[str, Exception]] = [], + errors: list[tuple[str, Exception]] = [], ) -> None: super().__init__(products) self.number_matched = number_matched @@ -92,7 +82,7 @@ def filter_date( return self.crunch(FilterDate(dict(start=start, end=end))) def filter_latest_intersect( - self, geometry: Union[Dict[str, Any], BaseGeometry, Any] + self, geometry: Union[dict[str, Any], BaseGeometry, Any] ) -> SearchResult: """ Apply :class:`~eodag.plugins.crunch.filter_latest_intersect.FilterLatestIntersect` crunch, @@ -148,7 +138,7 @@ def filter_online(self) -> SearchResult: return self.filter_property(storageStatus="ONLINE") @staticmethod - def from_geojson(feature_collection: Dict[str, Any]) -> SearchResult: + def from_geojson(feature_collection: dict[str, Any]) -> SearchResult: """Builds an :class:`~eodag.api.search_result.SearchResult` object from its representation as geojson :param feature_collection: A collection representing a search result. @@ -161,7 +151,7 @@ def from_geojson(feature_collection: Dict[str, Any]) -> SearchResult: ] ) - def as_geojson_object(self) -> Dict[str, Any]: + def as_geojson_object(self) -> dict[str, Any]: """GeoJSON representation of SearchResult""" return { "type": "FeatureCollection", @@ -182,7 +172,7 @@ def as_wkt_object(self) -> str: return self.as_shapely_geometry_object().wkt @property - def __geo_interface__(self) -> Dict[str, Any]: + def __geo_interface__(self) -> dict[str, Any]: """Implements the geo-interface protocol. 
See https://gist.github.com/sgillies/2217756 @@ -230,9 +220,9 @@ class RawSearchResult(UserList): :param results: A list of raw/unparsed search results """ - data: List[Any] - query_params: Dict[str, Any] - product_type_def_params: Dict[str, Any] + data: list[Any] + query_params: dict[str, Any] + product_type_def_params: dict[str, Any] - def __init__(self, results: List[Any]) -> None: + def __init__(self, results: list[Any]) -> None: super(RawSearchResult, self).__init__(results) diff --git a/eodag/cli.py b/eodag/cli.py index aab78a10d..275f76e1a 100755 --- a/eodag/cli.py +++ b/eodag/cli.py @@ -48,7 +48,7 @@ import sys import textwrap from importlib.metadata import metadata -from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Set +from typing import TYPE_CHECKING, Any, Mapping import click @@ -104,7 +104,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super(MutuallyExclusiveOption, self).__init__(*args, **kwargs) def handle_parse_result( - self, ctx: Context, opts: Mapping[str, Any], args: List[str] + self, ctx: Context, opts: Mapping[str, Any], args: list[str] ): """Raise error or use parent handle_parse_result()""" if self.mutually_exclusive.intersection(opts) and self.name in opts: @@ -359,9 +359,9 @@ def search_crunch(ctx: Context, **kwargs: Any) -> None: count = kwargs.pop("count") # Process inputs for crunch - cruncher_names: Set[Any] = set(kwargs.pop("cruncher") or []) + cruncher_names: set[Any] = set(kwargs.pop("cruncher") or []) cruncher_args = kwargs.pop("cruncher_args") - cruncher_args_dict: Dict[str, Dict[str, Any]] = {} + cruncher_args_dict: dict[str, dict[str, Any]] = {} if cruncher_args: for cruncher, argname, argval in cruncher_args: cruncher_args_dict.setdefault(cruncher, {}).setdefault(argname, argval) diff --git a/eodag/config.py b/eodag/config.py index 88b307707..0ceb1f1f7 100644 --- a/eodag/config.py +++ b/eodag/config.py @@ -24,13 +24,10 @@ from typing import ( Annotated, Any, - Dict, ItemsView, Iterator, - List, Literal, Optional, - Tuple, TypedDict, Union, ValuesView, @@ -79,7 +76,7 @@ class SimpleYamlProxyConfig: def __init__(self, conf_file_path: str) -> None: try: - self.source: Dict[str, Any] = cached_yaml_load(conf_file_path) + self.source: dict[str, Any] = cached_yaml_load(conf_file_path) except yaml.parser.ParserError as e: print("Unable to load user configuration file") raise e @@ -127,12 +124,12 @@ class ProviderConfig(yaml.YAMLObject): name: str group: str priority: int = 0 # Set default priority to 0 - roles: List[str] + roles: list[str] description: str url: str api: PluginConfig search: PluginConfig - products: Dict[str, Any] + products: dict[str, Any] download: PluginConfig auth: PluginConfig search_auth: PluginConfig @@ -154,7 +151,7 @@ def from_yaml(cls, loader: yaml.Loader, node: Any) -> Iterator[ProviderConfig]: return loader.construct_yaml_object(node, cls) @classmethod - def from_mapping(cls, mapping: Dict[str, Any]) -> ProviderConfig: + def from_mapping(cls, mapping: dict[str, Any]) -> ProviderConfig: """Build a :class:`~eodag.config.ProviderConfig` from a mapping""" cls.validate(mapping) for key in PLUGINS_TOPICS_KEYS: @@ -165,7 +162,7 @@ def from_mapping(cls, mapping: Dict[str, Any]) -> ProviderConfig: return c @staticmethod - def validate(config_keys: Union[Tuple[str, ...], Dict[str, Any]]) -> None: + def validate(config_keys: Union[tuple[str, ...], dict[str, Any]]) -> None: """Validate a :class:`~eodag.config.ProviderConfig` :param config_keys: The configurations keys to validate @@ -181,7 +178,7 @@ def 
validate(config_keys: Union[Tuple[str, ...], Dict[str, Any]]) -> None: "type of plugin" ) - def update(self, mapping: Optional[Dict[str, Any]]) -> None: + def update(self, mapping: Optional[dict[str, Any]]) -> None: """Update the configuration parameters with values from `mapping` :param mapping: The mapping from which to override configuration parameters @@ -197,7 +194,7 @@ def update(self, mapping: Optional[Dict[str, Any]]) -> None: }, ) for key in PLUGINS_TOPICS_KEYS: - current_value: Optional[Dict[str, Any]] = getattr(self, key, None) + current_value: Optional[dict[str, Any]] = getattr(self, key, None) mapping_value = mapping.get(key, {}) if current_value is not None: current_value.update(mapping_value) @@ -240,14 +237,14 @@ class Sort(TypedDict): """Configuration for sort during search""" #: Default sort settings - sort_by_default: List[Tuple[str, str]] + sort_by_default: list[tuple[str, str]] #: F-string template to add to :attr:`~eodag.config.PluginConfig.Pagination.next_page_url_tpl` to sort search #: results sort_by_tpl: str #: Mapping between eodag and provider query parameters used for sort - sort_param_mapping: Dict[str, str] + sort_param_mapping: dict[str, str] #: Mapping between eodag and provider sort-order parameters - sort_order_mapping: Dict[Literal["ascending", "descending"], str] + sort_order_mapping: dict[Literal["ascending", "descending"], str] #: Maximum number of allowed sort parameters per request max_sort_params: Annotated[int, Gt(0)] @@ -271,7 +268,7 @@ class DiscoverProductTypes(TypedDict, total=False): #: HTTP method used to fetch product types fetch_method: str #: Request body to fetch product types using POST method - fetch_body: Dict[str, Any] + fetch_body: dict[str, Any] #: Maximum number of connections for concurrent HTTP requests max_connections: int #: The f-string template for pagination requests. @@ -286,17 +283,17 @@ class DiscoverProductTypes(TypedDict, total=False): generic_product_type_id: str #: Mapping for product type metadata (e.g. 
``abstract``, ``licence``) which can be parsed from the provider #: result - generic_product_type_parsable_metadata: Dict[str, str] + generic_product_type_parsable_metadata: dict[str, str] #: Mapping for product type properties which can be parsed from the result and are not product type metadata - generic_product_type_parsable_properties: Dict[str, str] + generic_product_type_parsable_properties: dict[str, str] #: Mapping for product type properties which cannot be parsed from the result and are not product type metadata - generic_product_type_unparsable_properties: Dict[str, str] + generic_product_type_unparsable_properties: dict[str, str] #: URL to fetch data for a single collection single_collection_fetch_url: str #: Query string to be added to the fetch_url to filter for a collection single_collection_fetch_qs: str #: Mapping for product type metadata returned by the endpoint given in single_collection_fetch_url - single_product_type_parsable_metadata: Dict[str, str] + single_product_type_parsable_metadata: dict[str, str] class DiscoverQueryables(TypedDict, total=False): """Configuration for queryables discovery""" @@ -318,7 +315,7 @@ class OrderOnResponse(TypedDict): """Configuration for order on-response during download""" #: Parameters metadata-mapping to apply to the order response - metadata_mapping: Dict[str, Union[str, List[str]]] + metadata_mapping: dict[str, Union[str, list[str]]] class OrderStatusSuccess(TypedDict): """ @@ -351,7 +348,7 @@ class OrderStatusRequest(TypedDict, total=False): #: Request HTTP method method: str #: Request hearders - headers: Dict[str, Any] + headers: dict[str, Any] class OrderStatusOnSuccess(TypedDict, total=False): """Configuration for order status on-success during download""" @@ -363,7 +360,7 @@ class OrderStatusOnSuccess(TypedDict, total=False): #: Key in the success response that gives access to the result results_entry: str #: Metadata-mapping to apply to the success status result - metadata_mapping: Dict[str, Union[str, List[str]]] + metadata_mapping: dict[str, Union[str, list[str]]] class OrderStatus(TypedDict, total=False): """Configuration for order status during download""" @@ -371,11 +368,11 @@ class OrderStatus(TypedDict, total=False): #: Order status request configuration request: PluginConfig.OrderStatusRequest #: Metadata-mapping used to parse order status response - metadata_mapping: Dict[str, Union[str, List[str]]] + metadata_mapping: dict[str, Union[str, list[str]]] #: Configuration to identify order status success during download success: PluginConfig.OrderStatusSuccess #: Part of the order status response that tells there is an error - error: Dict[str, Any] + error: dict[str, Any] #: Configuration to identify order status ordered during download ordered: PluginConfig.OrderStatusOrdered #: Configuration for order status on-success during download @@ -400,7 +397,7 @@ class MetadataPreMapping(TypedDict, total=False): #: :class:`~eodag.plugins.base.PluginTopic` Default s3 bucket s3_bucket: str #: :class:`~eodag.plugins.base.PluginTopic` Authentication error codes - auth_error_code: Union[int, List[int]] + auth_error_code: Union[int, list[int]] #: :class:`~eodag.plugins.base.PluginTopic` Time to wait until request timeout in seconds timeout: float #: :class:`~eodag.plugins.base.PluginTopic` :class:`urllib3.util.Retry` ``total`` parameter, @@ -411,13 +408,13 @@ class MetadataPreMapping(TypedDict, total=False): retry_backoff_factor: int #: :class:`~eodag.plugins.base.PluginTopic` :class:`urllib3.util.Retry` ``status_forcelist`` 
parameter, #: list of integer HTTP status codes that we should force a retry on - retry_status_forcelist: List[int] + retry_status_forcelist: list[int] # search & api ----------------------------------------------------------------------------------------------------- # copied from ProviderConfig in PluginManager.get_search_plugins() priority: int # per product type metadata-mapping, set in core._prepare_search - product_type_config: Dict[str, Any] + product_type_config: dict[str, Any] #: :class:`~eodag.plugins.search.base.Search` Plugin API endpoint api_endpoint: str @@ -439,15 +436,15 @@ class MetadataPreMapping(TypedDict, total=False): #: :class:`~eodag.plugins.search.base.Search` Configuration for the queryables auto-discovery discover_queryables: PluginConfig.DiscoverQueryables #: :class:`~eodag.plugins.search.base.Search` The mapping between eodag metadata and the plugin specific metadata - metadata_mapping: Dict[str, Union[str, List[str]]] + metadata_mapping: dict[str, Union[str, list[str]]] #: :class:`~eodag.plugins.search.base.Search` Parameters to remove from queryables - remove_from_queryables: List[str] + remove_from_queryables: list[str] #: :class:`~eodag.plugins.search.base.Search` Parameters to be passed as is in the search url query string - literal_search_params: Dict[str, str] + literal_search_params: dict[str, str] #: :class:`~eodag.plugins.search.qssearch.QueryStringSearch` Characters that should not be quoted in the url params - dont_quote: List[str] + dont_quote: list[str] #: :class:`~eodag.plugins.search.qssearch.ODataV4Search` Dict describing free text search request build - free_text_search_operations: Dict[str, Any] + free_text_search_operations: dict[str, Any] #: :class:`~eodag.plugins.search.qssearch.ODataV4Search` Set to ``True`` if the metadata is not given in the search #: result and a two step search has to be performed per_product_metadata_query: bool @@ -464,11 +461,11 @@ class MetadataPreMapping(TypedDict, total=False): #: if date parameters are mandatory in the request dates_required: bool #: :class:`~eodag.plugins.search.csw.CSWSearch` Search definition dictionary - search_definition: Dict[str, Any] + search_definition: dict[str, Any] #: :class:`~eodag.plugins.search.qssearch.PostJsonSearch` Whether to merge responses or not (`aws_eos` specific) merge_responses: bool #: :class:`~eodag.plugins.search.qssearch.PostJsonSearch` Collections names (`aws_eos` specific) - collection: List[str] + collection: list[str] #: :class:`~eodag.plugins.search.static_stac_search.StaticStacSearch` #: Maximum number of connections for concurrent HTTP requests max_connections: int @@ -477,7 +474,7 @@ class MetadataPreMapping(TypedDict, total=False): end_date_excluded: bool #: :class:`~eodag.plugins.search.build_search_result.ECMWFSearch` #: List of parameters used to parse metadata but that must not be included to the query - remove_from_query: List[str] + remove_from_query: list[str] #: :class:`~eodag.plugins.search.csw.CSWSearch` #: OGC Catalogue Service version version: str @@ -501,13 +498,13 @@ class MetadataPreMapping(TypedDict, total=False): #: :class:`~eodag.plugins.download.base.Download` Whether ignore assets and download using ``downloadLink`` or not ignore_assets: bool #: :class:`~eodag.plugins.download.base.Download` Product type specific configuration - products: Dict[str, Dict[str, Any]] + products: dict[str, dict[str, Any]] #: :class:`~eodag.plugins.download.http.HTTPDownload` Whether the product has to be ordered to download it or not order_enabled: bool #: 
:class:`~eodag.plugins.download.http.HTTPDownload` HTTP request method for the order request order_method: str #: :class:`~eodag.plugins.download.http.HTTPDownload` Headers to be added to the order request - order_headers: Dict[str, str] + order_headers: dict[str, str] #: :class:`~eodag.plugins.download.http.HTTPDownload` #: Dictionary containing the key :attr:`~eodag.config.PluginConfig.metadata_mapping` which can be used to add new #: product properties based on the data in response to the order request @@ -518,7 +515,7 @@ class MetadataPreMapping(TypedDict, total=False): #: Do not authenticate the download request but only the order and order status ones no_auth_download: bool #: :class:`~eodag.plugins.download.http.HTTPDownload` Parameters to be added to the query params of the request - dl_url_params: Dict[str, str] + dl_url_params: dict[str, str] #: :class:`~eodag.plugins.download.s3rest.S3RestDownload` #: At which level of the path part of the url the bucket can be found bucket_path_level: int @@ -529,15 +526,15 @@ class MetadataPreMapping(TypedDict, total=False): # auth ------------------------------------------------------------------------------------------------------------- #: :class:`~eodag.plugins.authentication.base.Authentication` Authentication credentials dictionary - credentials: Dict[str, str] + credentials: dict[str, str] #: :class:`~eodag.plugins.authentication.base.Authentication` Authentication URL auth_uri: str #: :class:`~eodag.plugins.authentication.base.Authentication` #: Dictionary containing all keys/value pairs that should be added to the headers - headers: Dict[str, str] + headers: dict[str, str] #: :class:`~eodag.plugins.authentication.base.Authentication` #: Dictionary containing all keys/value pairs that should be added to the headers for token retrieve only - retrieve_headers: Dict[str, str] + retrieve_headers: dict[str, str] #: :class:`~eodag.plugins.authentication.base.Authentication` #: The key pointing to the token in the response from the token server token_key: str @@ -549,7 +546,7 @@ class MetadataPreMapping(TypedDict, total=False): matching_url: str #: :class:`~eodag.plugins.authentication.base.Authentication` Part of the search or download plugin configuration #: that needs authentication - matching_conf: Dict[str, Any] + matching_conf: dict[str, Any] #: :class:`~eodag.plugins.authentication.openid_connect.OIDCRefreshTokenBase` #: How the token should be used in the request token_provision: str @@ -561,7 +558,7 @@ class MetadataPreMapping(TypedDict, total=False): #: The OIDC provider's ``.well-known/openid-configuration`` url. 
oidc_config_url: str #: :class:`~eodag.plugins.authentication.openid_connect.OIDCRefreshTokenBase` The OIDC token audiences - allowed_audiences: List[str] + allowed_audiences: list[str] #: :class:`~eodag.plugins.authentication.openid_connect.OIDCAuthorizationCodeFlowAuth` #: Whether a user consent is needed during the authentication or not user_consent_needed: str @@ -585,16 +582,16 @@ class MetadataPreMapping(TypedDict, total=False): user_consent_form_xpath: str #: :class:`~eodag.plugins.authentication.openid_connect.OIDCAuthorizationCodeFlowAuth` #: The data that will be passed with the POST request on the form 'action' URL - user_consent_form_data: Dict[str, str] + user_consent_form_data: dict[str, str] #: :class:`~eodag.plugins.authentication.openid_connect.OIDCAuthorizationCodeFlowAuth` #: Additional data to be passed to the login POST request - additional_login_form_data: Dict[str, str] + additional_login_form_data: dict[str, str] #: :class:`~eodag.plugins.authentication.openid_connect.OIDCAuthorizationCodeFlowAuth` #: Key/value pairs of patterns/messages used for Authentication errors - exchange_url_error_pattern: Dict[str, str] + exchange_url_error_pattern: dict[str, str] #: :class:`~eodag.plugins.authentication.openid_connect.OIDCAuthorizationCodeFlowAuth` #: A mapping between OIDC url query string and token handler query string params - token_exchange_params: Dict[str, str] + token_exchange_params: dict[str, str] #: :class:`~eodag.plugins.authentication.openid_connect.OIDCAuthorizationCodeFlowAuth` #: Refers to the name of the query param to be used in the query request token_qs_key: str @@ -608,7 +605,7 @@ class MetadataPreMapping(TypedDict, total=False): signed_url_key: str #: :class:`~eodag.plugins.authentication.token.TokenAuth` #: Credentials json structure if they should be sent as POST data - req_data: Dict[str, Any] + req_data: dict[str, Any] #: :class:`~eodag.plugins.authentication.token.TokenAuth` #: URL used to fetch the access token with a refresh token refresh_uri: str @@ -618,7 +615,7 @@ class MetadataPreMapping(TypedDict, total=False): #: :class:`~eodag.plugins.authentication.token_exchange.OIDCTokenExchangeAuth` #: The full :class:`~eodag.plugins.authentication.openid_connect.OIDCAuthorizationCodeFlowAuth` plugin configuration #: used to retrieve subject token - subject: Dict[str, Any] + subject: dict[str, Any] #: :class:`~eodag.plugins.authentication.token_exchange.OIDCTokenExchangeAuth` #: Identifies the issuer of the `subject_token` subject_issuer: str @@ -640,21 +637,21 @@ def from_yaml(cls, loader: yaml.Loader, node: Any) -> PluginConfig: return loader.construct_yaml_object(node, cls) @classmethod - def from_mapping(cls, mapping: Dict[str, Any]) -> PluginConfig: + def from_mapping(cls, mapping: dict[str, Any]) -> PluginConfig: """Build a :class:`~eodag.config.PluginConfig` from a mapping""" c = cls() c.__dict__.update(mapping) return c @staticmethod - def validate(config_keys: Tuple[Any, ...]) -> None: + def validate(config_keys: tuple[Any, ...]) -> None: """Validate a :class:`~eodag.config.PluginConfig`""" if "type" not in config_keys: raise ValidationError( "A Plugin config must specify the Plugin it configures" ) - def update(self, mapping: Optional[Dict[Any, Any]]) -> None: + def update(self, mapping: Optional[dict[Any, Any]]) -> None: """Update the configuration parameters with values from `mapping` :param mapping: The mapping from which to override configuration parameters @@ -666,7 +663,7 @@ def update(self, mapping: Optional[Dict[Any, Any]]) -> None: 
) -def load_default_config() -> Dict[str, ProviderConfig]: +def load_default_config() -> dict[str, ProviderConfig]: """Load the providers configuration into a dictionary. Load from eodag `resources/providers.yml` or `EODAG_PROVIDERS_CFG_FILE` environment @@ -680,18 +677,18 @@ def load_default_config() -> Dict[str, ProviderConfig]: return load_config(eodag_providers_cfg_file) -def load_config(config_path: str) -> Dict[str, ProviderConfig]: +def load_config(config_path: str) -> dict[str, ProviderConfig]: """Load the providers configuration into a dictionary from a given file :param config_path: The path to the provider config file :returns: The default provider's configuration """ logger.debug("Loading configuration from %s", config_path) - config: Dict[str, ProviderConfig] = {} + config: dict[str, ProviderConfig] = {} try: # Providers configs are stored in this file as separated yaml documents # Load all of it - providers_configs: List[ProviderConfig] = cached_yaml_load_all(config_path) + providers_configs: list[ProviderConfig] = cached_yaml_load_all(config_path) except yaml.parser.ParserError as e: logger.error("Unable to load configuration") raise e @@ -714,7 +711,7 @@ def credentials_in_auth(auth_conf: PluginConfig) -> bool: def share_credentials( - providers_config: Dict[str, ProviderConfig], + providers_config: dict[str, ProviderConfig], ) -> None: """Share credentials between plugins having the same matching criteria @@ -758,7 +755,7 @@ def share_credentials( def provider_config_init( provider_config: ProviderConfig, - stac_search_default_conf: Optional[Dict[str, Any]] = None, + stac_search_default_conf: Optional[dict[str, Any]] = None, ) -> None: """Applies some default values to provider config @@ -796,7 +793,7 @@ def provider_config_init( pass -def override_config_from_file(config: Dict[str, Any], file_path: str) -> None: +def override_config_from_file(config: dict[str, Any], file_path: str) -> None: """Override a configuration with the values in a file :param config: An eodag providers configuration dictionary @@ -814,14 +811,14 @@ def override_config_from_file(config: Dict[str, Any], file_path: str) -> None: override_config_from_mapping(config, config_in_file) -def override_config_from_env(config: Dict[str, Any]) -> None: +def override_config_from_env(config: dict[str, Any]) -> None: """Override a configuration with environment variables values :param config: An eodag providers configuration dictionary """ def build_mapping_from_env( - env_var: str, env_value: str, mapping: Dict[str, Any] + env_var: str, env_value: str, mapping: dict[str, Any] ) -> None: """Recursively build a dictionary from an environment variable. 
@@ -872,7 +869,7 @@ def build_mapping_from_env( new_map = mapping.setdefault(parts[0], {}) build_mapping_from_env("__".join(parts[1:]), env_value, new_map) - mapping_from_env: Dict[str, Any] = {} + mapping_from_env: dict[str, Any] = {} for env_var in os.environ: if env_var.startswith("EODAG__"): build_mapping_from_env( @@ -885,7 +882,7 @@ def build_mapping_from_env( def override_config_from_mapping( - config: Dict[str, Any], mapping: Dict[str, Any] + config: dict[str, Any], mapping: dict[str, Any] ) -> None: """Override a configuration with the values in a mapping @@ -923,7 +920,7 @@ def override_config_from_mapping( ) # try overriding conf - old_conf: Optional[Dict[str, Any]] = config.get(provider) + old_conf: Optional[dict[str, Any]] = config.get(provider) if old_conf is not None: old_conf.update(new_conf) else: @@ -944,7 +941,7 @@ def override_config_from_mapping( logger.debug(tb.format_exc()) -def merge_configs(config: Dict[str, Any], other_config: Dict[str, Any]) -> None: +def merge_configs(config: dict[str, Any], other_config: dict[str, Any]) -> None: """Override a configuration with the values of another configuration :param config: An eodag providers configuration dictionary @@ -976,7 +973,7 @@ def merge_configs(config: Dict[str, Any], other_config: Dict[str, Any]) -> None: config[provider] = new_conf -def load_yml_config(yml_path: str) -> Dict[Any, Any]: +def load_yml_config(yml_path: str) -> dict[Any, Any]: """Load a conf dictionary from given yml absolute path :returns: The yml configuration file @@ -985,7 +982,7 @@ def load_yml_config(yml_path: str) -> Dict[Any, Any]: return dict_items_recursive_apply(config.source, string_to_jsonpath) -def load_stac_config() -> Dict[str, Any]: +def load_stac_config() -> dict[str, Any]: """Load the stac configuration into a dictionary :returns: The stac configuration @@ -995,7 +992,7 @@ def load_stac_config() -> Dict[str, Any]: ) -def load_stac_api_config() -> Dict[str, Any]: +def load_stac_api_config() -> dict[str, Any]: """Load the stac API configuration into a dictionary :returns: The stac API configuration @@ -1005,7 +1002,7 @@ def load_stac_api_config() -> Dict[str, Any]: ) -def load_stac_provider_config() -> Dict[str, Any]: +def load_stac_provider_config() -> dict[str, Any]: """Load the stac provider configuration into a dictionary :returns: The stac provider configuration @@ -1017,7 +1014,7 @@ def load_stac_provider_config() -> Dict[str, Any]: def get_ext_product_types_conf( conf_uri: str = EXT_PRODUCT_TYPES_CONF_URI, -) -> Dict[str, Any]: +) -> dict[str, Any]: """Read external product types conf :param conf_uri: URI to local or remote configuration file diff --git a/eodag/plugins/apis/base.py b/eodag/plugins/apis/base.py index 5ea263e41..318281d6e 100644 --- a/eodag/plugins/apis/base.py +++ b/eodag/plugins/apis/base.py @@ -57,5 +57,5 @@ class Api(Search, Download): :param provider: An EODAG provider name :type provider: str :param config: An EODAG plugin configuration - :type config: Dict[str, Any] + :type config: dict[str, Any] """ diff --git a/eodag/plugins/apis/ecmwf.py b/eodag/plugins/apis/ecmwf.py index c299d7235..c6e5c0c27 100644 --- a/eodag/plugins/apis/ecmwf.py +++ b/eodag/plugins/apis/ecmwf.py @@ -48,13 +48,14 @@ from eodag.utils.logging import get_logging_verbose if TYPE_CHECKING: - from typing import Any, Dict, List, Optional, Tuple, Union + from typing import Any, Optional, Union from requests.auth import AuthBase from eodag.api.product import EOProduct from eodag.api.search_result import SearchResult from eodag.config 
import PluginConfig + from eodag.types import S3SessionKwargs from eodag.types.download_args import DownloadConf from eodag.utils import DownloadedCallback, ProgressCallback, Unpack @@ -83,7 +84,7 @@ class EcmwfApi(Api, ECMWFSearch): * :attr:`~eodag.config.PluginConfig.type` (``str``) (**mandatory**): EcmwfApi * :attr:`~eodag.config.PluginConfig.auth_endpoint` (``str``) (**mandatory**): url of the authentication endpoint of the ecmwf api - * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``Dict[str, Union[str, list]]``): how + * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``dict[str, Union[str, list]]``): how parameters should be mapped between the provider and eodag; If a string is given, this is the mapping parameter returned by provider -> eodag parameter. If a list with 2 elements is given, the first one is the mapping eodag parameter -> provider query parameters @@ -104,7 +105,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: self.config.__dict__.setdefault("pagination", {"next_page_query_obj": "{{}}"}) self.config.__dict__.setdefault("api_endpoint", "") - def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]: + def do_search(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: """Should perform the actual search request.""" return [{}] @@ -112,7 +113,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """Build ready-to-download SearchResult""" # check productType, dates, geometry, use defaults if not specified @@ -145,7 +146,7 @@ def query( return ECMWFSearch.query(self, prep, **kwargs) - def authenticate(self) -> Dict[str, Optional[str]]: + def authenticate(self) -> dict[str, Optional[str]]: """Check credentials and returns information needed for auth :returns: {key, url, email} dictionary @@ -176,7 +177,7 @@ def authenticate(self) -> Dict[str, Optional[str]]: def download( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, @@ -265,13 +266,13 @@ def download( def download_all( self, products: SearchResult, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, downloaded_callback: Optional[DownloadedCallback] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, **kwargs: Unpack[DownloadConf], - ) -> List[str]: + ) -> list[str]: """ Download all using parent (base plugin) method """ @@ -291,7 +292,7 @@ def clear(self) -> None: def discover_queryables( self, **kwargs: Any - ) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]: + ) -> Optional[dict[str, Annotated[Any, FieldInfo]]]: """Fetch queryables list from provider using metadata mapping :param kwargs: additional filters for queryables (`productType` and other search diff --git a/eodag/plugins/apis/usgs.py b/eodag/plugins/apis/usgs.py index 876045757..c786edcbe 100644 --- a/eodag/plugins/apis/usgs.py +++ b/eodag/plugins/apis/usgs.py @@ -22,7 +22,7 @@ import shutil import tarfile import zipfile -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, Optional, Union, cast import requests from jsonpath_ng.ext import parse @@ 
-61,6 +61,7 @@ from eodag.api.search_result import SearchResult from eodag.config import PluginConfig + from eodag.types import S3SessionKwargs from eodag.types.download_args import DownloadConf from eodag.utils import DownloadedCallback, Unpack @@ -86,7 +87,7 @@ class UsgsApi(Api): file should be extracted; default: ``True`` * :attr:`~eodag.config.PluginConfig.order_enabled` (``bool``): if the product has to be ordered to download it; default: ``False`` - * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``Dict[str, Union[str, list]]``): how + * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``dict[str, Union[str, list]]``): how parameters should be mapped between the provider and eodag; If a string is given, this is the mapping parameter returned by provider -> eodag parameter. If a list with 2 elements is given, the first one is the mapping eodag parameter -> provider query parameters @@ -99,7 +100,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: # Same method as in base.py, Search.__init__() # Prepare the metadata mapping # Do a shallow copy, the structure is flat enough for this to be sufficient - metas: Dict[str, Any] = DEFAULT_METADATA_MAPPING.copy() + metas: dict[str, Any] = DEFAULT_METADATA_MAPPING.copy() # Update the defaults with the mapping value. This will add any new key # added by the provider mapping that is not in the default metadata. metas.update(self.config.metadata_mapping) @@ -138,7 +139,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """Search for data on USGS catalogues""" page = prep.page if prep.page is not None else DEFAULT_PAGE items_per_page = ( @@ -164,7 +165,7 @@ def query( start_date = kwargs.pop("startTimeFromAscendingNode", None) end_date = kwargs.pop("completionTimeFromAscendingNode", None) geom = kwargs.pop("geometry", None) - footprint: Dict[str, str] = {} + footprint: dict[str, str] = {} if hasattr(geom, "bounds"): ( footprint["lonmin"], @@ -175,7 +176,7 @@ def query( else: footprint = geom - final: List[EOProduct] = [] + final: list[EOProduct] = [] if footprint and len(footprint.keys()) == 4: # a rectangle (or bbox) lower_left = { "longitude": footprint["lonmin"], @@ -295,7 +296,7 @@ def query( def download( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, @@ -340,7 +341,7 @@ def download( product.properties["productId"], ) - req_urls: List[str] = [] + req_urls: list[str] = [] try: if len(download_request_results["data"]["preparingDownloads"]) > 0: req_urls.extend( @@ -464,13 +465,13 @@ def download_request( def download_all( self, products: SearchResult, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, downloaded_callback: Optional[DownloadedCallback] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, **kwargs: Unpack[DownloadConf], - ) -> List[str]: + ) -> list[str]: """ Download all using parent (base plugin) method """ diff --git a/eodag/plugins/authentication/aws_auth.py b/eodag/plugins/authentication/aws_auth.py index 85ef36b6a..dc2b2e7ae 100644 --- a/eodag/plugins/authentication/aws_auth.py +++ 
b/eodag/plugins/authentication/aws_auth.py @@ -17,9 +17,10 @@ # limitations under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Dict +from typing import TYPE_CHECKING, Optional, cast from eodag.plugins.authentication.base import Authentication +from eodag.types import S3SessionKwargs if TYPE_CHECKING: from mypy_boto3_s3.client import S3Client @@ -53,12 +54,12 @@ class AwsAuth(Authentication): def __init__(self, provider: str, config: PluginConfig) -> None: super(AwsAuth, self).__init__(provider, config) - self.aws_access_key_id = None - self.aws_secret_access_key = None - self.aws_session_token = None - self.profile_name = None + self.aws_access_key_id: Optional[str] = None + self.aws_secret_access_key: Optional[str] = None + self.aws_session_token: Optional[str] = None + self.profile_name: Optional[str] = None - def authenticate(self) -> Dict[str, str]: + def authenticate(self) -> S3SessionKwargs: """Authenticate :returns: dict containing AWS/boto3 non-empty credentials @@ -75,10 +76,12 @@ def authenticate(self) -> Dict[str, str]: ) self.profile_name = credentials.get("aws_profile", self.profile_name) - auth_keys = [ - "aws_access_key_id", - "aws_secret_access_key", - "aws_session_token", - "profile_name", - ] - return {k: getattr(self, k) for k in auth_keys if getattr(self, k)} + auth_dict = cast( + S3SessionKwargs, + { + k: getattr(self, k) + for k in S3SessionKwargs.__annotations__ + if getattr(self, k, None) + }, + ) + return auth_dict diff --git a/eodag/plugins/authentication/base.py b/eodag/plugins/authentication/base.py index 02edcbf04..57ce793f4 100644 --- a/eodag/plugins/authentication/base.py +++ b/eodag/plugins/authentication/base.py @@ -17,7 +17,7 @@ # limitations under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Dict, Union +from typing import TYPE_CHECKING, Union from eodag.plugins.base import PluginTopic from eodag.utils.exceptions import MisconfiguredError @@ -25,6 +25,8 @@ if TYPE_CHECKING: from requests.auth import AuthBase + from eodag.types import S3SessionKwargs + class Authentication(PluginTopic): """Plugins authentication Base plugin @@ -34,11 +36,11 @@ class Authentication(PluginTopic): * :attr:`~eodag.config.PluginConfig.matching_url` (``str``): URL pattern to match with search plugin endpoint or download link - * :attr:`~eodag.config.PluginConfig.matching_conf` (``Dict[str, Any]``): Part of the search or download plugin + * :attr:`~eodag.config.PluginConfig.matching_conf` (``dict[str, Any]``): Part of the search or download plugin configuration that needs authentication and helps identifying it """ - def authenticate(self) -> Union[AuthBase, Dict[str, str]]: + def authenticate(self) -> Union[AuthBase, S3SessionKwargs]: """Authenticate""" raise NotImplementedError diff --git a/eodag/plugins/authentication/header.py b/eodag/plugins/authentication/header.py index 6e64d8291..a795f7d12 100644 --- a/eodag/plugins/authentication/header.py +++ b/eodag/plugins/authentication/header.py @@ -17,7 +17,7 @@ # limitations under the License. 
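``S3SessionKwargs`` is imported from ``eodag.types``; its definition is not shown in this patch, but judging from the attributes handled by ``AwsAuth`` above and from the inline ``TypedDict`` removed from ``eodag/plugins/download/aws.py`` further down, it is presumably a ``total=False`` TypedDict whose keys mirror the keyword arguments of ``boto3.session.Session``, along these lines::

    from typing import TypedDict

    class S3SessionKwargs(TypedDict, total=False):
        aws_access_key_id: str
        aws_secret_access_key: str
        aws_session_token: str
        profile_name: str

With every key optional, ``AwsAuth.authenticate()`` can iterate ``S3SessionKwargs.__annotations__`` and return only the credentials that are actually set, and downstream code can pass the result straight to ``boto3.session.Session(**auth_dict)``.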
from __future__ import annotations -from typing import TYPE_CHECKING, Dict +from typing import TYPE_CHECKING from requests.auth import AuthBase @@ -38,7 +38,7 @@ class HTTPHeaderAuth(Authentication): :param config: Authentication plugin configuration: * :attr:`~eodag.config.PluginConfig.type` (``str``) (**mandatory**): HTTPHeaderAuth - * :attr:`~eodag.config.PluginConfig.headers` (``Dict[str, str]``): dictionary containing + * :attr:`~eodag.config.PluginConfig.headers` (``dict[str, str]``): dictionary containing all keys/value pairs that should be added to the headers Below an example for the configuration in the providers config file is shown:: @@ -106,7 +106,7 @@ def authenticate(self) -> HeaderAuth: class HeaderAuth(AuthBase): """HeaderAuth custom authentication class to be used with requests module""" - def __init__(self, authentication_headers: Dict[str, str]) -> None: + def __init__(self, authentication_headers: dict[str, str]) -> None: self.auth_headers = authentication_headers def __call__(self, request: PreparedRequest) -> PreparedRequest: diff --git a/eodag/plugins/authentication/keycloak.py b/eodag/plugins/authentication/keycloak.py index f27145a1b..ca85bfd8e 100644 --- a/eodag/plugins/authentication/keycloak.py +++ b/eodag/plugins/authentication/keycloak.py @@ -18,7 +18,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any import requests @@ -56,7 +56,7 @@ class KeycloakOIDCPasswordAuth(OIDCRefreshTokenBase): token should be added to the query string (``qs``) or to the header (``header``) * :attr:`~eodag.config.PluginConfig.token_qs_key` (``str``): (**mandatory if token_provision=qs**) key of the param added to the query string - * :attr:`~eodag.config.PluginConfig.allowed_audiences` (``List[str]``) (**mandatory**): + * :attr:`~eodag.config.PluginConfig.allowed_audiences` (``list[str]``) (**mandatory**): The allowed audiences that have to be present in the user token. * :attr:`~eodag.config.PluginConfig.auth_error_code` (``int``): which error code is returned in case of an authentication error @@ -130,7 +130,7 @@ def authenticate(self) -> AuthBase: key=getattr(self.config, "token_qs_key", None), ) - def _request_new_token(self) -> Dict[str, Any]: + def _request_new_token(self) -> dict[str, Any]: logger.debug("fetching new access token") req_data = { "client_id": self.config.client_id, @@ -154,7 +154,7 @@ def _request_new_token(self) -> Dict[str, Any]: return self._request_new_token_error(e) return response.json() - def _get_token_with_refresh_token(self) -> Dict[str, str]: + def _get_token_with_refresh_token(self) -> dict[str, str]: logger.debug("fetching access token with refresh token") req_data = { "client_id": self.config.client_id, diff --git a/eodag/plugins/authentication/oauth.py b/eodag/plugins/authentication/oauth.py index 1cf90d300..222b699d7 100644 --- a/eodag/plugins/authentication/oauth.py +++ b/eodag/plugins/authentication/oauth.py @@ -17,12 +17,13 @@ # limitations under the License. 
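For illustration, ``HeaderAuth`` above is a plain ``requests`` authentication object built from the ``dict[str, str]`` headers mapping; a hypothetical direct use (header name and URL are made up) looks like::

    import requests

    from eodag.plugins.authentication.header import HeaderAuth

    auth = HeaderAuth({"X-API-Key": "my-secret-key"})
    # requests passes each PreparedRequest through auth.__call__(), which merges auth_headers in
    response = requests.get("https://provider.example/search", auth=auth)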
from __future__ import annotations -from typing import TYPE_CHECKING, Dict, Optional +from typing import TYPE_CHECKING, Optional from eodag.plugins.authentication.base import Authentication if TYPE_CHECKING: from eodag.config import PluginConfig + from eodag.types import S3SessionKwargs class OAuth(Authentication): @@ -43,9 +44,12 @@ def __init__(self, provider: str, config: PluginConfig) -> None: self.access_key: Optional[str] = None self.secret_key: Optional[str] = None - def authenticate(self) -> Dict[str, str]: + def authenticate(self) -> S3SessionKwargs: """Authenticate""" self.validate_config_credentials() self.access_key = self.config.credentials["aws_access_key_id"] self.secret_key = self.config.credentials["aws_secret_access_key"] - return {"access_key": self.access_key, "secret_key": self.secret_key} + return { + "aws_access_key_id": self.access_key, + "aws_secret_access_key": self.secret_key, + } diff --git a/eodag/plugins/authentication/openid_connect.py b/eodag/plugins/authentication/openid_connect.py index ea1d9d01b..e81bee375 100644 --- a/eodag/plugins/authentication/openid_connect.py +++ b/eodag/plugins/authentication/openid_connect.py @@ -22,7 +22,7 @@ import string from datetime import datetime, timedelta, timezone from random import SystemRandom -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional import jwt import requests @@ -88,7 +88,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: self.authorization_endpoint = auth_config["authorization_endpoint"] self.algorithms = auth_config["id_token_signing_alg_values_supported"] - def decode_jwt_token(self, token: str) -> Dict[str, Any]: + def decode_jwt_token(self, token: str) -> dict[str, Any]: """Decode JWT token.""" try: key = self.jwks_client.get_signing_key_from_jwt(token).key @@ -144,13 +144,13 @@ def _get_access_token(self) -> str: return self.access_token - def _request_new_token(self) -> Dict[str, str]: + def _request_new_token(self) -> dict[str, str]: """Fetch the access token with a new authentication""" raise NotImplementedError( "Incomplete OIDC refresh token retrieval mechanism implementation" ) - def _request_new_token_error(self, e: requests.RequestException) -> Dict[str, str]: + def _request_new_token_error(self, e: requests.RequestException) -> dict[str, str]: """Handle RequestException raised by `self._request_new_token()`""" if self.access_token: # try using already retrieved token if authenticate() fails (OTP use-case) @@ -186,7 +186,7 @@ def _request_new_token_error(self, e: requests.RequestException) -> Dict[str, st ) ) - def _get_token_with_refresh_token(self) -> Dict[str, str]: + def _get_token_with_refresh_token(self) -> dict[str, str]: """Fetch the access token with the refresh token""" raise NotImplementedError( "Incomplete OIDC refresh token retrieval mechanism implementation" @@ -241,21 +241,21 @@ class OIDCAuthorizationCodeFlowAuth(OIDCRefreshTokenBase): authentication_uri_source=config**) The URL of the authentication backend of the OIDC provider * :attr:`~eodag.config.PluginConfig.user_consent_form_xpath` (``str``): The xpath to the user consent form. The form is searched in the content of the response to the authorization request - * :attr:`~eodag.config.PluginConfig.user_consent_form_data` (``Dict[str, str]``): The data that + * :attr:`~eodag.config.PluginConfig.user_consent_form_data` (``dict[str, str]``): The data that will be passed with the POST request on the form 'action' URL. 
The data are given as key value pairs, the keys representing the data key and the value being either a 'constant' string value, or a string of the form 'xpath()' and representing a value to be retrieved in the user consent form. The xpath must resolve directly to a string value, not to an HTML element. Example: ``xpath(//input[@name="sessionDataKeyConsent"]/@value)`` - * :attr:`~eodag.config.PluginConfig.additional_login_form_data` (``Dict[str, str]``): A mapping + * :attr:`~eodag.config.PluginConfig.additional_login_form_data` (``dict[str, str]``): A mapping giving additional data to be passed to the login POST request. The value follows the same rules as with user_consent_form_data - * :attr:`~eodag.config.PluginConfig.exchange_url_error_pattern` (``Dict[str, str]``): Key/value + * :attr:`~eodag.config.PluginConfig.exchange_url_error_pattern` (``dict[str, str]``): Key/value pairs of patterns/messages. If exchange_url contains the given pattern, the associated message will be sent in an AuthenticationError * :attr:`~eodag.config.PluginConfig.client_secret` (``str``): The OIDC provider's client secret of the eodag provider - * :attr:`~eodag.config.PluginConfig.token_exchange_params` (``Dict[str, str]``): mandatory + * :attr:`~eodag.config.PluginConfig.token_exchange_params` (``dict[str, str]``): mandatory keys for the dict: redirect_uri, client_id; A mapping between OIDC url query string and token handler query string params (only necessary if they are not the same as for OIDC). This is eodag provider dependant @@ -298,7 +298,7 @@ def authenticate(self) -> CodeAuthorizedAuth: key=getattr(self.config, "token_qs_key", None), ) - def _request_new_token(self) -> Dict[str, str]: + def _request_new_token(self) -> dict[str, str]: """Fetch the access token with a new authentication""" logger.debug("Fetching access token from %s", self.token_endpoint) state = self.compute_state() @@ -326,12 +326,12 @@ def _request_new_token(self) -> Dict[str, str]: return self._request_new_token_error(e) return token_response.json() - def _get_token_with_refresh_token(self) -> Dict[str, str]: + def _get_token_with_refresh_token(self) -> dict[str, str]: """Fetch the access token with the refresh token""" logger.debug( "Fetching access token with refresh token from %s.", self.token_endpoint ) - token_data: Dict[str, Any] = { + token_data: dict[str, Any] = { "refresh_token": self.refresh_token, "grant_type": "refresh_token", } @@ -435,7 +435,7 @@ def grant_user_consent(self, authentication_response: Response) -> Response: verify=ssl_verify, ) - def _prepare_token_post_data(self, token_data: Dict[str, Any]) -> Dict[str, Any]: + def _prepare_token_post_data(self, token_data: dict[str, Any]) -> dict[str, Any]: """Prepare the common data to post to the token URI""" token_data.update( { @@ -471,7 +471,7 @@ def exchange_code_for_token(self, authorized_url: str, state: str) -> Response: "The state received in the authorized url does not match initially computed state" ) code = qs["code"][0] - token_exchange_data: Dict[str, Any] = { + token_exchange_data: dict[str, Any] = { "code": code, "state": state, "grant_type": "authorization_code", diff --git a/eodag/plugins/authentication/sas_auth.py b/eodag/plugins/authentication/sas_auth.py index 4b0e9cb98..628b96c08 100644 --- a/eodag/plugins/authentication/sas_auth.py +++ b/eodag/plugins/authentication/sas_auth.py @@ -19,7 +19,7 @@ import logging from json import JSONDecodeError -from typing import TYPE_CHECKING, Dict, Optional +from typing import TYPE_CHECKING, Optional import 
requests from requests.auth import AuthBase @@ -42,13 +42,13 @@ def __init__( self, auth_uri: str, signed_url_key: str, - headers: Optional[Dict[str, str]] = None, + headers: Optional[dict[str, str]] = None, ssl_verify: bool = True, ) -> None: self.auth_uri = auth_uri self.signed_url_key = signed_url_key self.headers = headers - self.signed_urls: Dict[str, str] = {} + self.signed_urls: dict[str, str] = {} self.ssl_verify = ssl_verify def __call__(self, request: PreparedRequest) -> PreparedRequest: @@ -97,7 +97,7 @@ class SASAuth(Authentication): get the signed url * :attr:`~eodag.config.PluginConfig.signed_url_key` (``str``) (**mandatory**): key to get the signed url - * :attr:`~eodag.config.PluginConfig.headers` (``Dict[str, str]``) (**mandatory if + * :attr:`~eodag.config.PluginConfig.headers` (``dict[str, str]``) (**mandatory if apiKey is used**): headers to be added to the requests * :attr:`~eodag.config.PluginConfig.ssl_verify` (``bool``): if the ssl certificates should be verified in the requests; default: ``True`` diff --git a/eodag/plugins/authentication/token.py b/eodag/plugins/authentication/token.py index 85c2c3408..51d7068e5 100644 --- a/eodag/plugins/authentication/token.py +++ b/eodag/plugins/authentication/token.py @@ -18,7 +18,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional from urllib.parse import parse_qs, urlencode, urlparse, urlunparse import requests @@ -60,9 +60,9 @@ class TokenAuth(Authentication): * :attr:`~eodag.config.PluginConfig.type` (``str``) (**mandatory**): TokenAuth * :attr:`~eodag.config.PluginConfig.auth_uri` (``str``) (**mandatory**): url used to fetch the access token with user/password - * :attr:`~eodag.config.PluginConfig.headers` (``Dict[str, str]``): Dictionary containing all + * :attr:`~eodag.config.PluginConfig.headers` (``dict[str, str]``): Dictionary containing all keys/value pairs that should be added to the headers - * :attr:`~eodag.config.PluginConfig.retrieve_headers` (``Dict[str, str]``): Dictionary containing all + * :attr:`~eodag.config.PluginConfig.retrieve_headers` (``dict[str, str]``): Dictionary containing all keys/value pairs that should be added to the headers for token retrieve only * :attr:`~eodag.config.PluginConfig.refresh_uri` (``str``) : url used to fetch the access token with a refresh token @@ -76,13 +76,13 @@ class TokenAuth(Authentication): should be verified in the requests; default: ``True`` * :attr:`~eodag.config.PluginConfig.auth_error_code` (``int``): which error code is returned in case of an authentication error - * :attr:`~eodag.config.PluginConfig.req_data` (``Dict[str, Any]``): if the credentials + * :attr:`~eodag.config.PluginConfig.req_data` (``dict[str, Any]``): if the credentials should be sent as data in the post request, the json structure can be given in this parameter * :attr:`~eodag.config.PluginConfig.retry_total` (``int``): :class:`urllib3.util.Retry` ``total`` parameter, total number of retries to allow; default: ``3`` * :attr:`~eodag.config.PluginConfig.retry_backoff_factor` (``int``): :class:`urllib3.util.Retry` ``backoff_factor`` parameter, backoff factor to apply between attempts after the second try; default: ``2`` - * :attr:`~eodag.config.PluginConfig.retry_status_forcelist` (``List[int]``): :class:`urllib3.util.Retry` + * :attr:`~eodag.config.PluginConfig.retry_status_forcelist` (``list[int]``): :class:`urllib3.util.Retry` ``status_forcelist`` parameter, list of integer HTTP status 
codes that we should force a retry on; default: ``[401, 429, 500, 502, 503, 504]`` """ @@ -200,7 +200,7 @@ def _token_request( headers = self.config.headers # append headers to req if some are specified in config - req_kwargs: Dict[str, Any] = {"headers": dict(headers, **USER_AGENT)} + req_kwargs: dict[str, Any] = {"headers": dict(headers, **USER_AGENT)} ssl_verify = getattr(self.config, "ssl_verify", True) if self.refresh_token: @@ -260,7 +260,7 @@ def __init__( token: str, where: str, qs_key: Optional[str] = None, - headers: Optional[Dict[str, str]] = None, + headers: Optional[dict[str, str]] = None, ) -> None: self.token = token self.where = where diff --git a/eodag/plugins/authentication/token_exchange.py b/eodag/plugins/authentication/token_exchange.py index 0c19608a5..b809acca0 100644 --- a/eodag/plugins/authentication/token_exchange.py +++ b/eodag/plugins/authentication/token_exchange.py @@ -41,7 +41,7 @@ class OIDCTokenExchangeAuth(Authentication): :param provider: provider name :param config: Authentication plugin configuration: - * :attr:`~eodag.config.PluginConfig.subject` (``Dict[str, Any]``) (**mandatory**): + * :attr:`~eodag.config.PluginConfig.subject` (``dict[str, Any]``) (**mandatory**): The full :class:`~eodag.plugins.authentication.openid_connect.OIDCAuthorizationCodeFlowAuth` plugin configuration used to retrieve subject token * :attr:`~eodag.config.PluginConfig.subject_issuer` (``str``) (**mandatory**): Identifies diff --git a/eodag/plugins/base.py b/eodag/plugins/base.py index e3ce4edcc..f767bf8fe 100644 --- a/eodag/plugins/base.py +++ b/eodag/plugins/base.py @@ -17,7 +17,7 @@ # limitations under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List, Tuple +from typing import TYPE_CHECKING, Any from eodag.utils.exceptions import PluginNotFoundError @@ -29,21 +29,21 @@ class EODAGPluginMount(type): """Plugin mount""" def __init__( - cls, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any] + cls, name: str, bases: tuple[type, ...], attrs: dict[str, Any] ) -> None: if not hasattr(cls, "plugins"): # This branch only executes when processing the mount point itself. # So, since this is a new plugin type, not an implementation, this # class shouldn't be registered as a plugin. Instead, it sets up a # list where plugins can be registered later. - cls.plugins: List[EODAGPluginMount] = [] + cls.plugins: list[EODAGPluginMount] = [] else: # This must be a plugin implementation, which should be registered. # Simply appending it to the list is all that's needed to keep # track of it later. 
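The registration behaviour spelled out in the comments above can be illustrated with a minimal, hypothetical mount point (not part of eodag)::

    from eodag.plugins.base import EODAGPluginMount

    class BaseTopic(metaclass=EODAGPluginMount):
        pass                              # mount point itself: gets an empty plugins list

    class ConcretePlugin(BaseTopic):
        pass                              # implementation: appended to BaseTopic.plugins

    assert BaseTopic.plugins == [ConcretePlugin]
    assert isinstance(BaseTopic.get_plugins()[0], ConcretePlugin)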
cls.plugins.append(cls) - def get_plugins(cls, *args: Any, **kwargs: Any) -> List[EODAGPluginMount]: + def get_plugins(cls, *args: Any, **kwargs: Any) -> list[EODAGPluginMount]: """Get plugins""" return [plugin(*args, **kwargs) for plugin in cls.plugins] diff --git a/eodag/plugins/crunch/base.py b/eodag/plugins/crunch/base.py index 15d042b63..65b07f38a 100644 --- a/eodag/plugins/crunch/base.py +++ b/eodag/plugins/crunch/base.py @@ -17,7 +17,7 @@ # limitations under the License from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Optional from eodag.config import PluginConfig from eodag.plugins.base import PluginTopic @@ -32,12 +32,12 @@ class Crunch(PluginTopic): :param config: Crunch configuration """ - def __init__(self, config: Optional[Dict[str, Any]]) -> None: + def __init__(self, config: Optional[dict[str, Any]]) -> None: self.config = PluginConfig() self.config.__dict__ = config if config is not None else {} def proceed( - self, products: List[EOProduct], **search_params: Any - ) -> List[EOProduct]: + self, products: list[EOProduct], **search_params: Any + ) -> list[EOProduct]: """Implementation of how the results must be crunched""" raise NotImplementedError diff --git a/eodag/plugins/crunch/filter_date.py b/eodag/plugins/crunch/filter_date.py index 0a77c5399..d72743356 100644 --- a/eodag/plugins/crunch/filter_date.py +++ b/eodag/plugins/crunch/filter_date.py @@ -21,7 +21,7 @@ import logging import time from datetime import datetime as dt -from typing import TYPE_CHECKING, Any, List +from typing import TYPE_CHECKING, Any import dateutil.parser from dateutil import tz @@ -57,8 +57,8 @@ def sort_product_by_start_date(product: EOProduct) -> dt: return dateutil.parser.parse(start_date) def proceed( - self, products: List[EOProduct], **search_params: Any - ) -> List[EOProduct]: + self, products: list[EOProduct], **search_params: Any + ) -> list[EOProduct]: """Execute crunch: Filter products between start and end dates. :param products: A list of products resulting from a search @@ -89,7 +89,7 @@ def proceed( if not filter_start and not filter_end: return products - filtered: List[EOProduct] = [] + filtered: list[EOProduct] = [] for product in products: # product start date diff --git a/eodag/plugins/crunch/filter_latest_intersect.py b/eodag/plugins/crunch/filter_latest_intersect.py index 09abdbd05..cbe63b543 100644 --- a/eodag/plugins/crunch/filter_latest_intersect.py +++ b/eodag/plugins/crunch/filter_latest_intersect.py @@ -20,7 +20,7 @@ import datetime import logging import time -from typing import TYPE_CHECKING, Any, Dict, List, Union +from typing import TYPE_CHECKING, Any, Union import dateutil.parser from shapely import geometry @@ -54,14 +54,14 @@ def sort_product_by_start_date(product: EOProduct) -> dt: return dateutil.parser.parse(start_date) def proceed( - self, products: List[EOProduct], **search_params: Dict[str, Any] - ) -> List[EOProduct]: + self, products: list[EOProduct], **search_params: dict[str, Any] + ) -> list[EOProduct]: """Execute crunch: Filter latest products (the ones with a the highest start date) that intersect search extent. 
:param products: A list of products resulting from a search :param search_params: Search criteria that must contain ``geometry`` or ``geom`` parameters having value of - type :class:`shapely.geometry.base.BaseGeometry` or ``Dict[str, Any]`` + type :class:`shapely.geometry.base.BaseGeometry` or ``dict[str, Any]`` :returns: The filtered products """ logger.debug("Start filtering for latest products") @@ -69,9 +69,9 @@ def proceed( return [] # Warning: May crash if startTimeFromAscendingNode is not in the appropriate format products.sort(key=self.sort_product_by_start_date, reverse=True) - filtered: List[EOProduct] = [] + filtered: list[EOProduct] = [] add_to_filtered = filtered.append - footprint: Union[Dict[str, Any], BaseGeometry, Any] = search_params.get( + footprint: Union[dict[str, Any], BaseGeometry, Any] = search_params.get( "geometry" ) or search_params.get("geom") if not footprint: diff --git a/eodag/plugins/crunch/filter_latest_tpl_name.py b/eodag/plugins/crunch/filter_latest_tpl_name.py index 2acaeb5fb..b7337a854 100644 --- a/eodag/plugins/crunch/filter_latest_tpl_name.py +++ b/eodag/plugins/crunch/filter_latest_tpl_name.py @@ -19,7 +19,7 @@ import logging import re -from typing import TYPE_CHECKING, Any, Dict, List, Match, Optional, cast +from typing import TYPE_CHECKING, Any, Optional, cast from eodag.plugins.crunch.base import Crunch from eodag.utils.exceptions import ValidationError @@ -42,7 +42,7 @@ class FilterLatestByName(Crunch): NAME_PATTERN_CONSTRAINT = re.compile(r"\(\?P\\d\{6\}\)") - def __init__(self, config: Dict[str, Any]) -> None: + def __init__(self, config: dict[str, Any]) -> None: super(FilterLatestByName, self).__init__(config) name_pattern = config.pop("name_pattern") if not self.NAME_PATTERN_CONSTRAINT.search(name_pattern): @@ -54,19 +54,19 @@ def __init__(self, config: Dict[str, Any]) -> None: self.name_pattern = re.compile(name_pattern) def proceed( - self, products: List[EOProduct], **search_params: Any - ) -> List[EOProduct]: + self, products: list[EOProduct], **search_params: Any + ) -> list[EOProduct]: """Execute crunch: Filter Search results to get only the latest product, based on the name of the product :param products: A list of products resulting from a search :returns: The filtered products """ logger.debug("Starting products filtering") - processed: List[str] = [] - filtered: List[EOProduct] = [] + processed: list[str] = [] + filtered: list[EOProduct] = [] for product in products: match = cast( - Optional[Match[Any]], + Optional[re.Match[Any]], self.name_pattern.match(product.properties["title"]), ) if match: diff --git a/eodag/plugins/crunch/filter_overlap.py b/eodag/plugins/crunch/filter_overlap.py index 6446adc80..ce96134dc 100644 --- a/eodag/plugins/crunch/filter_overlap.py +++ b/eodag/plugins/crunch/filter_overlap.py @@ -18,7 +18,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, List +from typing import TYPE_CHECKING, Any from eodag.plugins.crunch.base import Crunch from eodag.utils import get_geometry_from_various @@ -49,8 +49,8 @@ class FilterOverlap(Crunch): """ def proceed( - self, products: List[EOProduct], **search_params: Any - ) -> List[EOProduct]: + self, products: list[EOProduct], **search_params: Any + ) -> list[EOProduct]: """Execute crunch: Filter products, retaining only those that are overlapping with the search_extent :param products: A list of products resulting from a search @@ -58,7 +58,7 @@ def proceed( :returns: The filtered products """ logger.debug("Start filtering for 
overlapping products") - filtered: List[EOProduct] = [] + filtered: list[EOProduct] = [] add_to_filtered = filtered.append search_geom = get_geometry_from_various(**search_params) diff --git a/eodag/plugins/crunch/filter_property.py b/eodag/plugins/crunch/filter_property.py index ff83812c3..14cdbbe00 100644 --- a/eodag/plugins/crunch/filter_property.py +++ b/eodag/plugins/crunch/filter_property.py @@ -19,7 +19,7 @@ import logging import operator -from typing import TYPE_CHECKING, Any, List +from typing import TYPE_CHECKING, Any from eodag.plugins.crunch.base import Crunch @@ -42,8 +42,8 @@ class FilterProperty(Crunch): """ def proceed( - self, products: List[EOProduct], **search_params: Any - ) -> List[EOProduct]: + self, products: list[EOProduct], **search_params: Any + ) -> list[EOProduct]: """Execute crunch: Filter products, retaining only those that match property filtering :param products: A list of products resulting from a search @@ -72,7 +72,7 @@ def proceed( property_key, property_value, ) - filtered: List[EOProduct] = [] + filtered: list[EOProduct] = [] add_to_filtered = filtered.append for product in products: diff --git a/eodag/plugins/download/aws.py b/eodag/plugins/download/aws.py index 08c008d9c..2c24f9c99 100644 --- a/eodag/plugins/download/aws.py +++ b/eodag/plugins/download/aws.py @@ -23,21 +23,7 @@ from datetime import datetime from itertools import chain from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterator, - List, - Match, - Optional, - Set, - Tuple, - TypedDict, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Union, cast import boto3 import requests @@ -81,6 +67,7 @@ from eodag.api.product import EOProduct from eodag.api.search_result import SearchResult from eodag.config import PluginConfig + from eodag.types import S3SessionKwargs from eodag.types.download_args import DownloadConf from eodag.utils import DownloadedCallback, Unpack @@ -230,14 +217,14 @@ class AwsDownload(Download): * :attr:`~eodag.config.PluginConfig.bucket_path_level` (``int``): at which level of the path part of the url the bucket can be found; If no bucket_path_level is given, the bucket is taken from the first element of the netloc part. 
- * :attr:`~eodag.config.PluginConfig.products` (``Dict[str, Dict[str, Any]``): product type + * :attr:`~eodag.config.PluginConfig.products` (``dict[str, dict[str, Any]``): product type specific config; the keys are the product types, the values are dictionaries which can contain the keys: * **default_bucket** (``str``): bucket where the product type can be found * **complementary_url_key** (``str``): keys to add additional urls * **build_safe** (``bool``): if a SAFE (Standard Archive Format for Europe) product should be created; used for Sentinel products; default: False - * **fetch_metadata** (``Dict[str, Any]``): config for metadata to be fetched for the SAFE product + * **fetch_metadata** (``dict[str, Any]``): config for metadata to be fetched for the SAFE product """ @@ -249,7 +236,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: def download( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, @@ -407,7 +394,7 @@ def _download_preparation( product: EOProduct, progress_callback: ProgressCallback, **kwargs: Unpack[DownloadConf], - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[Optional[str], Optional[str]]: """ preparation for the download: - check if file was already downloaded @@ -480,7 +467,7 @@ def _get_bucket_names_and_prefixes( product: EOProduct, asset_filter: Optional[str] = None, ignore_assets: Optional[bool] = False, - ) -> List[Tuple[str, Optional[str]]]: + ) -> list[tuple[str, Optional[str]]]: """ Retrieves the bucket names and path prefixes for the assets @@ -522,9 +509,9 @@ def _get_bucket_names_and_prefixes( def _do_authentication( self, - bucket_names_and_prefixes: List[Tuple[str, Optional[str]]], - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, - ) -> Tuple[Dict[str, Any], ResourceCollection]: + bucket_names_and_prefixes: list[tuple[str, Optional[str]]], + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, + ) -> tuple[dict[str, Any], ResourceCollection]: """ authenticates with s3 and retrieves the available objects raises an error when authentication is not possible @@ -538,8 +525,8 @@ def _do_authentication( ) if auth is None: auth = {} - authenticated_objects: Dict[str, Any] = {} - auth_error_messages: Set[str] = set() + authenticated_objects: dict[str, Any] = {} + auth_error_messages: set[str] = set() for _, pack in enumerate(bucket_names_and_prefixes): try: bucket_name, prefix = pack @@ -591,12 +578,12 @@ def _do_authentication( def _get_unique_products( self, - bucket_names_and_prefixes: List[Tuple[str, Optional[str]]], - authenticated_objects: Dict[str, Any], + bucket_names_and_prefixes: list[tuple[str, Optional[str]]], + authenticated_objects: dict[str, Any], asset_filter: Optional[str], ignore_assets: bool, product: EOProduct, - ) -> Set[Any]: + ) -> set[Any]: """ retrieve unique product chunks based on authenticated objects and asset filters :param bucket_names_and_prefixes: list of bucket names and corresponding path prefixes @@ -606,7 +593,7 @@ def _get_unique_products( :param product: product that shall be downloaded :return: set of product chunks that can be downloaded """ - product_chunks: List[Any] = [] + product_chunks: list[Any] = [] for bucket_name, prefix in bucket_names_and_prefixes: # unauthenticated items filtered out if bucket_name in authenticated_objects.keys(): @@ -637,7 +624,7 
@@ def _get_unique_products( def _raise_if_auth_error(self, exception: ClientError) -> None: """Raises an error if given exception is an authentication error""" - err = cast(Dict[str, str], exception.response["Error"]) + err = cast(dict[str, str], exception.response["Error"]) if err["Code"] in AWS_AUTH_ERROR_MESSAGES and "key" in err["Message"].lower(): raise AuthenticationError( f"Please check your credentials for {self.provider}.", @@ -648,7 +635,7 @@ def _raise_if_auth_error(self, exception: ClientError) -> None: def _stream_download_dict( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, @@ -754,11 +741,11 @@ def _stream_download_dict( def _stream_download( self, - unique_product_chunks: Set[Any], + unique_product_chunks: set[Any], product: EOProduct, build_safe: bool, progress_callback: ProgressCallback, - assets_values: List[Dict[str, Any]], + assets_values: list[dict[str, Any]], ) -> Iterator[Any]: """Yield product data chunks""" @@ -829,7 +816,7 @@ def get_chunk_parts( ) def _get_commonpath( - self, product: EOProduct, product_chunks: Set[Any], build_safe: bool + self, product: EOProduct, product_chunks: set[Any], build_safe: bool ) -> str: chunk_paths = [] for product_chunk in product_chunks: @@ -839,8 +826,8 @@ def _get_commonpath( return os.path.commonpath(chunk_paths) def get_rio_env( - self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] - ) -> Dict[str, Any]: + self, bucket_name: str, prefix: str, auth_dict: S3SessionKwargs + ) -> dict[str, Any]: """Get rasterio environment variables needed for data access authentication. :param bucket_name: Bucket containg objects @@ -864,7 +851,7 @@ def get_rio_env( return {"aws_unsigned": True} def get_authenticated_objects( - self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + self, bucket_name: str, prefix: str, auth_dict: S3SessionKwargs ) -> ResourceCollection: """Get boto3 authenticated objects for the given bucket using the most adapted auth strategy. 
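The ``RequestPayer=requester`` strategies that follow essentially build a boto3 session from the (possibly partial) ``S3SessionKwargs`` mapping and list the bucket; a rough sketch, leaving out the no-sign-request fallback, error handling and the strategy chain itself::

    import boto3

    from eodag.types import S3SessionKwargs

    def objects_for(bucket_name: str, prefix: str, auth_dict: S3SessionKwargs):
        # S3SessionKwargs keys map one-to-one onto boto3.session.Session keyword arguments
        session = boto3.session.Session(**auth_dict)
        return session.resource("s3").Bucket(bucket_name).objects.filter(
            Prefix=prefix, RequestPayer="requester"
        )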
@@ -876,8 +863,8 @@ def get_authenticated_objects( :param auth_dict: Dictionary containing authentication keys :returns: The boto3 authenticated objects """ - auth_methods: List[ - Callable[[str, str, Dict[str, str]], Optional[ResourceCollection]] + auth_methods: list[ + Callable[[str, str, S3SessionKwargs], Optional[ResourceCollection]] ] = [ self._get_authenticated_objects_unsigned, self._get_authenticated_objects_from_auth_profile, @@ -912,7 +899,7 @@ def get_authenticated_objects( ) def _get_authenticated_objects_unsigned( - self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + self, bucket_name: str, prefix: str, auth_dict: S3SessionKwargs ) -> Optional[ResourceCollection]: """Auth strategy using no-sign-request""" @@ -927,7 +914,7 @@ def _get_authenticated_objects_unsigned( return objects def _get_authenticated_objects_from_auth_profile( - self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + self, bucket_name: str, prefix: str, auth_dict: S3SessionKwargs ) -> Optional[ResourceCollection]: """Auth strategy using RequestPayer=requester and ``aws_profile`` from provided credentials""" @@ -950,21 +937,12 @@ def _get_authenticated_objects_from_auth_profile( return None def _get_authenticated_objects_from_auth_keys( - self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + self, bucket_name: str, prefix: str, auth_dict: S3SessionKwargs ) -> Optional[ResourceCollection]: """Auth strategy using RequestPayer=requester and ``aws_access_key_id``/``aws_secret_access_key`` from provided credentials""" if all(k in auth_dict for k in ("aws_access_key_id", "aws_secret_access_key")): - S3SessionKwargs = TypedDict( - "S3SessionKwargs", - { - "aws_access_key_id": str, - "aws_secret_access_key": str, - "aws_session_token": str, - }, - total=False, - ) s3_session_kwargs: S3SessionKwargs = { "aws_access_key_id": auth_dict["aws_access_key_id"], "aws_secret_access_key": auth_dict["aws_secret_access_key"], @@ -989,7 +967,7 @@ def _get_authenticated_objects_from_auth_keys( return None def _get_authenticated_objects_from_env( - self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + self, bucket_name: str, prefix: str, auth_dict: S3SessionKwargs ) -> Optional[ResourceCollection]: """Auth strategy using RequestPayer=requester and current environment""" @@ -1009,7 +987,7 @@ def _get_authenticated_objects_from_env( def get_product_bucket_name_and_prefix( self, product: EOProduct, url: Optional[str] = None - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Extract bucket name and prefix from product URL :param product: The EO product to download @@ -1140,7 +1118,7 @@ def get_chunk_dest_path( s1_title_suffix: Optional[str] = None # S2 common if product.product_type and "S2_MSI" in product.product_type: - title_search: Optional[Match[str]] = re.search( + title_search: Optional[re.Match[str]] = re.search( r"^\w+_\w+_(\w+)_(\w+)_(\w+)_(\w+)_(\w+)$", product.properties["title"], ) @@ -1326,13 +1304,13 @@ def get_chunk_dest_path( def download_all( self, products: SearchResult, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, downloaded_callback: Optional[DownloadedCallback] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, **kwargs: Unpack[DownloadConf], - ) -> List[str]: + ) -> list[str]: """ download_all using parent (base plugin) method """ diff --git a/eodag/plugins/download/base.py 
b/eodag/plugins/download/base.py index 7b1cfec37..a5025e705 100644 --- a/eodag/plugins/download/base.py +++ b/eodag/plugins/download/base.py @@ -26,17 +26,7 @@ import zipfile from datetime import datetime, timedelta from time import sleep -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - List, - Optional, - Tuple, - TypeVar, - Union, -) +from typing import TYPE_CHECKING, Any, Callable, Optional, TypeVar, Union from eodag.plugins.base import PluginTopic from eodag.utils import ( @@ -60,6 +50,7 @@ from eodag.api.product import EOProduct from eodag.api.search_result import SearchResult from eodag.config import PluginConfig + from eodag.types import S3SessionKwargs from eodag.types.download_args import DownloadConf from eodag.utils import DownloadedCallback, Unpack @@ -110,7 +101,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: def download( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, @@ -140,7 +131,7 @@ def download( def _stream_download_dict( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, @@ -170,7 +161,7 @@ def _prepare_download( product: EOProduct, progress_callback: Optional[ProgressCallback] = None, **kwargs: Unpack[DownloadConf], - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[Optional[str], Optional[str]]: """Check if file has already been downloaded, and prepare product download :param product: The EO product to download @@ -439,13 +430,13 @@ def _finalize( def download_all( self, products: SearchResult, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, downloaded_callback: Optional[DownloadedCallback] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, **kwargs: Unpack[DownloadConf], - ) -> List[str]: + ) -> list[str]: """ Base download_all method. @@ -474,7 +465,7 @@ def download_all( # Products are going to be removed one by one from this sequence once # downloaded. products = products[:] - paths: List[str] = [] + paths: list[str] = [] # initiate retry loop start_time = datetime.now() stop_time = start_time + timedelta(minutes=timeout) diff --git a/eodag/plugins/download/creodias_s3.py b/eodag/plugins/download/creodias_s3.py index 2c3a4afd1..97f963a0c 100644 --- a/eodag/plugins/download/creodias_s3.py +++ b/eodag/plugins/download/creodias_s3.py @@ -15,7 +15,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
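With ``download()`` and ``download_all()`` now typed as ``Optional[Union[AuthBase, S3SessionKwargs]]``, calling code can tell the two flavours apart with a plain ``isinstance`` check; a hypothetical helper (not part of eodag) to show the idea::

    from typing import Optional, Union

    from requests.auth import AuthBase

    from eodag.types import S3SessionKwargs

    def split_auth(auth: Optional[Union[AuthBase, S3SessionKwargs]]):
        if isinstance(auth, AuthBase):
            return auth, {}          # requests-style auth object, e.g. for HTTP downloads
        return None, (auth or {})    # plain mapping of boto3 session kwargs, e.g. for S3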
-from typing import List, Optional, Tuple +from typing import Optional import boto3 from botocore.exceptions import ClientError @@ -73,7 +73,7 @@ def _get_bucket_names_and_prefixes( product: EOProduct, asset_filter: Optional[str] = None, ignore_assets: Optional[bool] = False, - ) -> List[Tuple[str, Optional[str]]]: + ) -> list[tuple[str, Optional[str]]]: """ Retrieves the bucket names and path prefixes for the assets diff --git a/eodag/plugins/download/http.py b/eodag/plugins/download/http.py index ba5a9a67b..c46cad1b9 100644 --- a/eodag/plugins/download/http.py +++ b/eodag/plugins/download/http.py @@ -28,17 +28,7 @@ from itertools import chain from json import JSONDecodeError from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterator, - List, - Optional, - TypedDict, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Iterator, Optional, TypedDict, Union, cast from urllib.parse import parse_qs, urlparse import geojson @@ -92,6 +82,7 @@ from eodag.api.product import Asset, EOProduct # type: ignore from eodag.api.search_result import SearchResult from eodag.config import PluginConfig + from eodag.types import S3SessionKwargs from eodag.types.download_args import DownloadConf from eodag.utils import DownloadedCallback, Unpack @@ -112,7 +103,7 @@ class HTTPDownload(Download): extracted; default: ``True`` * :attr:`~eodag.config.PluginConfig.auth_error_code` (``int``): which error code is returned in case of an authentication error - * :attr:`~eodag.config.PluginConfig.dl_url_params` (``Dict[str, Any]``): parameters to be + * :attr:`~eodag.config.PluginConfig.dl_url_params` (``dict[str, Any]``): parameters to be added to the query params of the request * :attr:`~eodag.config.PluginConfig.archive_depth` (``int``): level in extracted path tree where to find data; default: ``1`` @@ -131,7 +122,7 @@ class HTTPDownload(Download): the search plugin used for the provider; default: ``False`` * :attr:`~eodag.config.PluginConfig.order_method` (``str``): HTTP request method for the order request (``GET`` or ``POST``); default: ``GET`` - * :attr:`~eodag.config.PluginConfig.order_headers` (``[Dict[str, str]]``): headers to be added to the order + * :attr:`~eodag.config.PluginConfig.order_headers` (``[dict[str, str]]``): headers to be added to the order request * :attr:`~eodag.config.PluginConfig.order_on_response` (:class:`~eodag.config.PluginConfig.OrderOnResponse`): a typed dictionary containing the key ``metadata_mapping`` which can be used to add new product properties @@ -139,7 +130,7 @@ class HTTPDownload(Download): * :attr:`~eodag.config.PluginConfig.order_status` (:class:`~eodag.config.PluginConfig.OrderStatus`): configuration to handle the order status; contains information which method to use, how the response data is interpreted, which status corresponds to success, ordered and error and what should be done on success. - * :attr:`~eodag.config.PluginConfig.products` (``Dict[str, Dict[str, Any]``): product type specific config; the + * :attr:`~eodag.config.PluginConfig.products` (``dict[str, dict[str, Any]``): product type specific config; the keys are the product types, the values are dictionaries which can contain the key :attr:`~eodag.config.PluginConfig.extract` to overwrite the provider config for a specific product type @@ -153,7 +144,7 @@ def _order( product: EOProduct, auth: Optional[AuthBase] = None, **kwargs: Unpack[DownloadConf], - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: """Send product order request. 
It will be executed once before the download retry loop, if the product is OFFLINE @@ -185,7 +176,7 @@ def _order( ssl_verify = getattr(self.config, "ssl_verify", True) timeout = getattr(self.config, "timeout", HTTP_REQ_TIMEOUT) OrderKwargs = TypedDict( - "OrderKwargs", {"json": Dict[str, Union[Any, List[str]]]}, total=False + "OrderKwargs", {"json": dict[str, Union[Any, list[str]]]}, total=False ) order_kwargs: OrderKwargs = {} if order_method == "POST": @@ -237,7 +228,7 @@ def _order( def order_response_process( self, response: Response, product: EOProduct - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: """Process order response :param response: The order response @@ -301,7 +292,7 @@ def _order_status( def _request( url: str, method: str = "GET", - headers: Optional[Dict[str, Any]] = None, + headers: Optional[dict[str, Any]] = None, json: Optional[Any] = None, timeout: int = HTTP_REQ_TIMEOUT, ) -> Response: @@ -337,7 +328,7 @@ def _request( except requests.exceptions.Timeout as exc: raise TimeOutError(exc, timeout=timeout) from exc - status_request: Dict[str, Any] = status_config.get("request", {}) + status_request: dict[str, Any] = status_config.get("request", {}) status_request_method = str(status_request.get("method", "GET")).upper() if status_request_method == "POST": @@ -354,8 +345,8 @@ def _request( # check header for success before full status request skip_parsing_status_response = False - status_dict: Dict[str, Any] = {} - config_on_success: Dict[str, Any] = status_config.get("on_success", {}) + status_dict: dict[str, Any] = {} + config_on_success: dict[str, Any] = status_config.get("on_success", {}) on_success_mm = config_on_success.get("metadata_mapping", {}) status_response_content_needed = ( @@ -439,13 +430,13 @@ def _request( product.properties["orderStatus"] = status_dict.get("status") # handle status error - errors: Dict[str, Any] = status_config.get("error", {}) + errors: dict[str, Any] = status_config.get("error", {}) if errors and errors.items() <= status_dict.items(): raise DownloadError( f"Provider {product.provider} returned: {status_dict.get('error_message', status_message)}" ) - success_status: Dict[str, Any] = status_config.get("success", {}).get("status") + success_status: dict[str, Any] = status_config.get("success", {}).get("status") # if not success if (success_status and success_status != status_dict.get("status")) or ( success_code and success_code != response.status_code @@ -563,7 +554,7 @@ def _request( def download( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, @@ -722,7 +713,7 @@ def _check_product_filename(self, product: EOProduct) -> str: def _stream_download_dict( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, @@ -891,7 +882,7 @@ def _order_request( def order( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, ) -> None: @@ -954,7 +945,7 @@ def _stream_download( if not query_dict and parts.query: query_dict = 
geojson.loads(parts.query) req_url = parts._replace(query="").geturl() - req_kwargs: Dict[str, Any] = {"json": query_dict} if query_dict else {} + req_kwargs: dict[str, Any] = {"json": query_dict} if query_dict else {} else: req_url = url req_kwargs = {} @@ -1022,7 +1013,7 @@ def _stream_download_assets( product: EOProduct, auth: Optional[AuthBase] = None, progress_callback: Optional[ProgressCallback] = None, - assets_values: List[Asset] = [], + assets_values: list[Asset] = [], **kwargs: Unpack[DownloadConf], ) -> Iterator[Any]: if progress_callback is None: @@ -1292,9 +1283,9 @@ def _handle_asset_exception(self, e: RequestException, asset: Asset) -> None: def _get_asset_sizes( self, - assets_values: List[Asset], + assets_values: list[Asset], auth: Optional[AuthBase], - params: Optional[Dict[str, str]], + params: Optional[dict[str, str]], zipped: bool = False, ) -> int: total_size = 0 @@ -1367,7 +1358,7 @@ def _get_asset_sizes( def download_all( self, products: SearchResult, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, downloaded_callback: Optional[DownloadedCallback] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, diff --git a/eodag/plugins/download/s3rest.py b/eodag/plugins/download/s3rest.py index 4be91d87d..439314921 100644 --- a/eodag/plugins/download/s3rest.py +++ b/eodag/plugins/download/s3rest.py @@ -20,7 +20,7 @@ import logging import os import os.path -from typing import TYPE_CHECKING, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Optional, Union from xml.dom import minidom from xml.parsers.expat import ExpatError @@ -54,6 +54,7 @@ if TYPE_CHECKING: from eodag.api.product import EOProduct from eodag.config import PluginConfig + from eodag.types import S3SessionKwargs from eodag.types.download_args import DownloadConf from eodag.utils import Unpack @@ -78,7 +79,7 @@ class S3RestDownload(Download): * :attr:`~eodag.config.PluginConfig.order_enabled` (``bool``): whether order is enabled or not if product is `OFFLINE` * :attr:`~eodag.config.PluginConfig.order_method` (``str``) HTTP request method, ``GET`` (default) or ``POST`` - * :attr:`~eodag.config.PluginConfig.order_headers` (``[Dict[str, str]]``): order request headers + * :attr:`~eodag.config.PluginConfig.order_headers` (``[dict[str, str]]``): order request headers * :attr:`~eodag.config.PluginConfig.order_on_response` (:class:`~eodag.config.PluginConfig.OrderOnResponse`): a typed dictionary containing the key :attr:`~eodag.config.PluginConfig.OrderOnResponse.metadata_mapping` which can be used to add new product properties based on the data in response to the order request @@ -93,7 +94,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: def download( self, product: EOProduct, - auth: Optional[Union[AuthBase, Dict[str, str]]] = None, + auth: Optional[Union[AuthBase, S3SessionKwargs]] = None, progress_callback: Optional[ProgressCallback] = None, wait: float = DEFAULT_DOWNLOAD_WAIT, timeout: float = DEFAULT_DOWNLOAD_TIMEOUT, @@ -270,7 +271,7 @@ def download_request( os.remove(record_filename) # total size for progress_callback - size_list: List[int] = [ + size_list: list[int] = [ int(node.firstChild.nodeValue) # type: ignore[attr-defined] for node in xmldoc.getElementsByTagName("Size") if node.firstChild is not None diff --git a/eodag/plugins/manager.py b/eodag/plugins/manager.py index 671868c7a..550ddcee6 100644 --- a/eodag/plugins/manager.py +++ b/eodag/plugins/manager.py 
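The download plugins above and the plugin manager below now type S3 credentials as S3SessionKwargs instead of a plain Dict[str, str]. As a minimal sketch only, what that mapping plausibly looks like and how the Union can be narrowed at runtime; the field names are an assumption (roughly the keyword arguments accepted by boto3.session.Session), not code copied from this patch:

    from typing import Optional, TypedDict, Union

    from requests.auth import AuthBase


    class S3SessionKwargs(TypedDict, total=False):
        # Assumed fields: keyword arguments forwarded to boto3.session.Session()
        aws_access_key_id: str
        aws_secret_access_key: str
        aws_session_token: str
        profile_name: str


    def describe_auth(auth: Optional[Union[AuthBase, S3SessionKwargs]]) -> str:
        # Toy helper showing how the new Union is narrowed.
        if auth is None:
            return "no authentication"
        if isinstance(auth, AuthBase):
            return "requests-style auth, used for HTTP downloads"
        return "boto3 session kwargs, used to open an S3 session"

Typed this way, static checkers can tell S3 credentials apart from requests authentication objects, which a bare Dict[str, str] could not express.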
@@ -21,18 +21,7 @@ import re from operator import attrgetter from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterator, - List, - Optional, - Tuple, - Type, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Iterator, Optional, Union, cast import pkg_resources @@ -61,6 +50,7 @@ from eodag.api.product import EOProduct from eodag.config import PluginConfig, ProviderConfig from eodag.plugins.base import PluginTopic + from eodag.types import S3SessionKwargs logger = logging.getLogger("eodag.plugins.manager") @@ -84,11 +74,11 @@ class PluginManager: supported_topics = set(PLUGINS_TOPICS_KEYS) - product_type_to_provider_config_map: Dict[str, List[ProviderConfig]] + product_type_to_provider_config_map: dict[str, list[ProviderConfig]] - skipped_plugins: List[str] + skipped_plugins: list[str] - def __init__(self, providers_config: Dict[str, ProviderConfig]) -> None: + def __init__(self, providers_config: dict[str, ProviderConfig]) -> None: self.skipped_plugins = [] self.providers_config = providers_config # Load all the plugins. This will make all plugin classes of a particular @@ -144,14 +134,14 @@ def __init__(self, providers_config: Dict[str, ProviderConfig]) -> None: self.rebuild() def rebuild( - self, providers_config: Optional[Dict[str, ProviderConfig]] = None + self, providers_config: Optional[dict[str, ProviderConfig]] = None ) -> None: """(Re)Build plugin manager mapping and cache""" if providers_config is not None: self.providers_config = providers_config self.build_product_type_to_provider_config_map() - self._built_plugins_cache: Dict[Tuple[str, str, str], Any] = {} + self._built_plugins_cache: dict[tuple[str, str, str], Any] = {} def build_product_type_to_provider_config_map(self) -> None: """Build mapping conf between product types and providers""" @@ -211,7 +201,7 @@ def get_plugin() -> Union[Search, Api]: ) return plugin - configs: Optional[List[ProviderConfig]] + configs: Optional[list[ProviderConfig]] if product_type: configs = self.product_type_to_provider_config_map.get(product_type) if not configs: @@ -378,7 +368,7 @@ def get_auth( provider: str, matching_url: Optional[str] = None, matching_conf: Optional[PluginConfig] = None, - ) -> Optional[Union[AuthBase, Dict[str, str]]]: + ) -> Optional[Union[AuthBase, S3SessionKwargs]]: """Authenticate and return the authenticated object for the first matching authentication plugin @@ -447,7 +437,7 @@ def _build_plugin( self, provider: str, plugin_conf: PluginConfig, - topic_class: Type[PluginTopic], + topic_class: type[PluginTopic], ) -> Union[Api, Search, Download, Authentication, Crunch]: """Build the plugin of the given topic with the given plugin configuration and registered as the given provider diff --git a/eodag/plugins/search/__init__.py b/eodag/plugins/search/__init__.py index 63a521fbe..ff57f820b 100644 --- a/eodag/plugins/search/__init__.py +++ b/eodag/plugins/search/__init__.py @@ -24,11 +24,12 @@ from eodag.utils import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE if TYPE_CHECKING: - from typing import Any, Dict, List, Optional, Union + from typing import Any, Optional, Union from requests.auth import AuthBase from eodag.plugins.authentication.base import Authentication + from eodag.types import S3SessionKwargs @dataclass @@ -38,7 +39,7 @@ class PreparedSearch: product_type: Optional[str] = None page: Optional[int] = DEFAULT_PAGE items_per_page: Optional[int] = DEFAULT_ITEMS_PER_PAGE - auth: Optional[Union[AuthBase, Dict[str, str]]] = None + auth: Optional[Union[AuthBase, S3SessionKwargs]] = 
None auth_plugin: Optional[Authentication] = None count: bool = True url: Optional[str] = None @@ -46,9 +47,9 @@ class PreparedSearch: exception_message: Optional[str] = None need_count: bool = field(init=False, repr=False) - query_params: Dict[str, Any] = field(init=False, repr=False) + query_params: dict[str, Any] = field(init=False, repr=False) query_string: str = field(init=False, repr=False) - search_urls: List[str] = field(init=False, repr=False) - product_type_def_params: Dict[str, Any] = field(init=False, repr=False) + search_urls: list[str] = field(init=False, repr=False) + product_type_def_params: dict[str, Any] = field(init=False, repr=False) total_items_nb: int = field(init=False, repr=False) sort_by_qs: str = field(init=False, repr=False) diff --git a/eodag/plugins/search/base.py b/eodag/plugins/search/base.py index 24bcabf12..104b0f851 100644 --- a/eodag/plugins/search/base.py +++ b/eodag/plugins/search/base.py @@ -43,12 +43,13 @@ from eodag.utils.exceptions import ValidationError if TYPE_CHECKING: - from typing import Any, Dict, List, Optional, Tuple, Union + from typing import Any, Optional, Union from requests.auth import AuthBase from eodag.api.product import EOProduct from eodag.config import PluginConfig + from eodag.types import S3SessionKwargs logger = logging.getLogger("eodag.search.base") @@ -60,9 +61,9 @@ class Search(PluginTopic): :param config: An EODAG plugin configuration """ - auth: Union[AuthBase, Dict[str, str]] + auth: Union[AuthBase, S3SessionKwargs] next_page_url: Optional[str] - next_page_query_obj: Optional[Dict[str, Any]] + next_page_query_obj: Optional[dict[str, Any]] total_items_nb: int need_count: bool _request: Any # needed by deprecated load_stac_items @@ -71,7 +72,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: super(Search, self).__init__(provider, config) # Prepare the metadata mapping # Do a shallow copy, the structure is flat enough for this to be sufficient - metas: Dict[str, Any] = DEFAULT_METADATA_MAPPING.copy() + metas: dict[str, Any] = DEFAULT_METADATA_MAPPING.copy() # Update the defaults with the mapping value. This will add any new key # added by the provider mapping that is not in the default metadata if self.config.metadata_mapping: @@ -90,7 +91,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """Implementation of how the products must be searched goes here. 
This method must return a tuple with (1) a list of :class:`~eodag.api.product._product.EOProduct` instances @@ -99,13 +100,13 @@ def query( """ raise NotImplementedError("A Search plugin must implement a method named query") - def discover_product_types(self, **kwargs: Any) -> Optional[Dict[str, Any]]: + def discover_product_types(self, **kwargs: Any) -> Optional[dict[str, Any]]: """Fetch product types list from provider using `discover_product_types` conf""" return None def discover_queryables( self, **kwargs: Any - ) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]: + ) -> Optional[dict[str, Annotated[Any, FieldInfo]]]: """Fetch queryables list from provider using :attr:`~eodag.config.PluginConfig.discover_queryables` conf :param kwargs: additional filters for queryables (``productType`` and other search @@ -118,7 +119,7 @@ def discover_queryables( def _get_defaults_as_queryables( self, product_type: str - ) -> Dict[str, Annotated[Any, FieldInfo]]: + ) -> dict[str, Annotated[Any, FieldInfo]]: """ Return given product type default settings as queryables @@ -128,7 +129,7 @@ def _get_defaults_as_queryables( defaults = deepcopy(self.config.products.get(product_type, {})) defaults.pop("metadata_mapping", None) - queryables: Dict[str, Annotated[Any, FieldInfo]] = {} + queryables: dict[str, Annotated[Any, FieldInfo]] = {} for parameter, value in defaults.items(): queryables[parameter] = Annotated[type(value), Field(default=value)] return queryables @@ -150,7 +151,7 @@ def map_product_type( def get_product_type_def_params( self, product_type: str, **kwargs: Any - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Get the provider product type definition parameters and specific settings :param product_type: the desired product type @@ -200,7 +201,7 @@ def get_product_type_cfg_value(self, key: str, default: Any = None) -> Any: def get_metadata_mapping( self, product_type: Optional[str] = None - ) -> Dict[str, Union[str, List[str]]]: + ) -> dict[str, Union[str, list[str]]]: """Get the plugin metadata mapping configuration (product type specific if exists) :param product_type: the desired product type @@ -212,7 +213,7 @@ def get_metadata_mapping( ) return self.config.metadata_mapping - def get_sort_by_arg(self, kwargs: Dict[str, Any]) -> Optional[SortByList]: + def get_sort_by_arg(self, kwargs: dict[str, Any]) -> Optional[SortByList]: """Extract the ``sort_by`` argument from the kwargs or the provider default sort configuration :param kwargs: Search arguments @@ -233,7 +234,7 @@ def get_sort_by_arg(self, kwargs: Dict[str, Any]) -> Optional[SortByList]: def build_sort_by( self, sort_by_arg: SortByList - ) -> Tuple[str, Dict[str, List[Dict[str, str]]]]: + ) -> tuple[str, dict[str, list[dict[str, str]]]]: """Build the sorting part of the query string or body by transforming the ``sort_by`` argument into a provider-specific string or dictionary @@ -247,9 +248,9 @@ def build_sort_by( sort_by_arg = list(dict.fromkeys(sort_by_arg)) sort_by_qs: str = "" - sort_by_qp: Dict[str, Any] = {} + sort_by_qp: dict[str, Any] = {} - provider_sort_by_tuples_used: List[Tuple[str, str]] = [] + provider_sort_by_tuples_used: list[tuple[str, str]] = [] for eodag_sort_by_tuple in sort_by_arg: eodag_sort_param = eodag_sort_by_tuple[0] provider_sort_param = self.config.sort["sort_param_mapping"].get( @@ -282,7 +283,7 @@ def build_sort_by( if eodag_sort_order == "ASC" else self.config.sort["sort_order_mapping"]["descending"] ) - provider_sort_by_tuple: Tuple[str, str] = ( + provider_sort_by_tuple: tuple[str, str] = ( 
provider_sort_param, provider_sort_order, ) @@ -315,7 +316,7 @@ def build_sort_by( sort_order=provider_sort_by_tuple[1], ) try: - parsed_sort_by_tpl_dict: Dict[str, Any] = orjson.loads( + parsed_sort_by_tpl_dict: dict[str, Any] = orjson.loads( parsed_sort_by_tpl ) sort_by_qp = update_nested_dict( @@ -326,9 +327,9 @@ def build_sort_by( return (sort_by_qs, sort_by_qp) def _get_product_type_queryables( - self, product_type: Optional[str], alias: Optional[str], filters: Dict[str, Any] + self, product_type: Optional[str], alias: Optional[str], filters: dict[str, Any] ) -> QueryablesDict: - default_values: Dict[str, Any] = deepcopy( + default_values: dict[str, Any] = deepcopy( getattr(self.config, "products", {}).get(product_type, {}) ) default_values.pop("metadata_mapping", None) @@ -342,9 +343,9 @@ def _get_product_type_queryables( def list_queryables( self, - filters: Dict[str, Any], - available_product_types: List[Any], - product_type_configs: Dict[str, Dict[str, Any]], + filters: dict[str, Any], + available_product_types: list[Any], + product_type_configs: dict[str, dict[str, Any]], product_type: Optional[str] = None, alias: Optional[str] = None, ) -> QueryablesDict: @@ -375,7 +376,7 @@ def list_queryables( return queryables else: - all_queryables: Dict[str, Any] = {} + all_queryables: dict[str, Any] = {} for pt in available_product_types: self.config.product_type_config = product_type_configs[pt] pt_queryables = self._get_product_type_queryables(pt, None, filters) @@ -393,18 +394,18 @@ def list_queryables( def queryables_from_metadata_mapping( self, product_type: Optional[str] = None, alias: Optional[str] = None - ) -> Dict[str, Annotated[Any, FieldInfo]]: + ) -> dict[str, Annotated[Any, FieldInfo]]: """ Extract queryable parameters from product type metadata mapping. 
:param product_type: product type id (optional) :param alias: (optional) alias of the product type :returns: dict of annotated queryables """ - metadata_mapping: Dict[str, Any] = deepcopy( + metadata_mapping: dict[str, Any] = deepcopy( self.get_metadata_mapping(product_type) ) - queryables: Dict[str, Annotated[Any, FieldInfo]] = {} + queryables: dict[str, Annotated[Any, FieldInfo]] = {} for param in list(metadata_mapping.keys()): if NOT_MAPPED in metadata_mapping[param] or not isinstance( diff --git a/eodag/plugins/search/build_search_result.py b/eodag/plugins/search/build_search_result.py index 46476d10e..143a93f3a 100644 --- a/eodag/plugins/search/build_search_result.py +++ b/eodag/plugins/search/build_search_result.py @@ -23,18 +23,7 @@ import re from collections import OrderedDict from datetime import datetime, timedelta -from typing import ( - TYPE_CHECKING, - Annotated, - Any, - Dict, - List, - Optional, - Set, - Tuple, - Union, - cast, -) +from typing import TYPE_CHECKING, Annotated, Any, Optional, Union, cast from urllib.parse import quote_plus, unquote_plus import geojson @@ -205,8 +194,8 @@ def keywords_to_mdt( - keywords: List[str], prefix: Optional[str] = None -) -> Dict[str, Any]: + keywords: list[str], prefix: Optional[str] = None +) -> dict[str, Any]: """ Make metadata mapping dict from a list of keywords @@ -223,7 +212,7 @@ def keywords_to_mdt( :param prefix: prefix to be added to the parameter in the mapping :return: metadata mapping dict """ - mdt: Dict[str, Any] = {} + mdt: dict[str, Any] = {} for keyword in keywords: key = f"{prefix}:{keyword}" if prefix else keyword mdt[key] = [keyword, f'$."{key}"'] @@ -251,7 +240,7 @@ def strip_quotes(value: Any) -> Any: def _update_properties_from_element( - prop: Dict[str, Any], element: Dict[str, Any], values: List[str] + prop: dict[str, Any], element: dict[str, Any], values: list[str] ) -> None: """updates a property dict with the given values based on the information from the element dict e.g. the type is set based on the type of the element @@ -333,7 +322,7 @@ class ECMWFSearch(PostJsonSearch): :param provider: An eodag providers configuration dictionary :param config: Search plugin configuration: - * :attr:`~eodag.config.PluginConfig.remove_from_query` (``List[str]``): List of parameters + * :attr:`~eodag.config.PluginConfig.remove_from_query` (``list[str]``): List of parameters used to parse metadata but that must not be included to the query * :attr:`~eodag.config.PluginConfig.end_date_excluded` (``bool``): Set to `False` if provider does not include end date to search @@ -402,7 +391,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: "metadata_mapping" ] = product_type_metadata_mapping - def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]: + def do_search(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: """Should perform the actual search request. 
:param args: arguments to be used in the search @@ -416,7 +405,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """Build ready-to-download SearchResult :param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information needed for the search @@ -439,7 +428,7 @@ def clear(self) -> None: def build_query_string( self, product_type: str, **kwargs: Any - ) -> Tuple[Dict[str, Any], str]: + ) -> tuple[dict[str, Any], str]: """Build The query string using the search parameters :param product_type: product type id @@ -464,7 +453,7 @@ def build_query_string( ) def _preprocess_search_params( - self, params: Dict[str, Any], product_type: Optional[str] + self, params: dict[str, Any], product_type: Optional[str] ) -> None: """Preprocess search parameters before making a request to the CDS API. @@ -544,10 +533,10 @@ def _preprocess_search_params( params["geometry"] = get_geometry_from_various(geometry=params["geometry"]) def _get_product_type_queryables( - self, product_type: Optional[str], alias: Optional[str], filters: Dict[str, Any] + self, product_type: Optional[str], alias: Optional[str], filters: dict[str, Any] ) -> QueryablesDict: """Override to set additional_properties to false.""" - default_values: Dict[str, Any] = deepcopy( + default_values: dict[str, Any] = deepcopy( getattr(self.config, "products", {}).get(product_type, {}) ) default_values.pop("metadata_mapping", None) @@ -559,7 +548,7 @@ def _get_product_type_queryables( def discover_queryables( self, **kwargs: Any - ) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]: + ) -> Optional[dict[str, Annotated[Any, FieldInfo]]]: """Fetch queryables list from provider using its constraints file :param kwargs: additional filters for queryables (`productType` and other search @@ -585,7 +574,7 @@ def discover_queryables( getattr(self.config, "discover_queryables", {}).get("constraints_url", ""), **kwargs, ) - constraints: List[Dict[str, Any]] = self.fetch_data(constraints_url) + constraints: list[dict[str, Any]] = self.fetch_data(constraints_url) form_url = format_metadata( getattr(self.config, "discover_queryables", {}).get("form_url", ""), @@ -614,18 +603,18 @@ def discover_queryables( # we use non empty kwargs as default to integrate user inputs # it is needed because pydantic json schema does not represent "value" # but only "default" - non_empty_formated: Dict[str, Any] = { + non_empty_formated: dict[str, Any] = { k: v for k, v in formated_kwargs.items() if v and (not isinstance(v, list) or all(v)) } - non_empty_kwargs: Dict[str, Any] = { + non_empty_kwargs: dict[str, Any] = { k: v for k, v in processed_kwargs.items() if v and (not isinstance(v, list) or all(v)) } - required_keywords: Set[str] = set() + required_keywords: set[str] = set() # calculate available values if constraints: @@ -703,7 +692,7 @@ def discover_queryables( # area is geom in EODAG. 
if queryables.pop("area", None): queryables["geom"] = Annotated[ - Union[str, Dict[str, float], BaseGeometry], + Union[str, dict[str, float], BaseGeometry], Field( None, description="Read EODAG documentation for all supported geometry format.", @@ -714,10 +703,10 @@ def discover_queryables( def available_values_from_constraints( self, - constraints: list[Dict[str, Any]], - input_keywords: Dict[str, Any], - form_keywords: List[str], - ) -> Dict[str, List[str]]: + constraints: list[dict[str, Any]], + input_keywords: dict[str, Any], + form_keywords: list[str], + ) -> dict[str, list[str]]: """ Filter constraints using input_keywords. Return list of available queryables. All constraint entries must have the same parameters. @@ -741,9 +730,9 @@ def available_values_from_constraints( ) # filter constraint entries matching input keyword values - filtered_constraints: List[Dict[str, Any]] + filtered_constraints: list[dict[str, Any]] - parsed_keywords: List[str] = [] + parsed_keywords: list[str] = [] for keyword in ordered_keywords: values = input_keywords.get(keyword) @@ -822,7 +811,7 @@ def available_values_from_constraints( parsed_keywords.append(keyword) constraints = filtered_constraints - available_values: Dict[str, Any] = {k: set() for k in ordered_keywords} + available_values: dict[str, Any] = {k: set() for k in ordered_keywords} # we aggregate the constraint entries left for entry in constraints: @@ -833,10 +822,10 @@ def available_values_from_constraints( def queryables_by_form( self, - form: List[Dict[str, Any]], - available_values: Dict[str, List[str]], - defaults: Dict[str, Any], - ) -> Dict[str, Annotated[Any, FieldInfo]]: + form: list[dict[str, Any]], + available_values: dict[str, list[str]], + defaults: dict[str, Any], + ) -> dict[str, Annotated[Any, FieldInfo]]: """ Generate Annotated field definitions from form entries and available values Used by Copernicus services like cop_cds, cop_ads, cop_ewds. @@ -846,9 +835,9 @@ def queryables_by_form( :param defaults: default values for the parameters :return: dict of annotated queryables """ - queryables: Dict[str, Annotated[Any, FieldInfo]] = {} + queryables: dict[str, Annotated[Any, FieldInfo]] = {} - required_list: List[str] = [] + required_list: list[str] = [] for element in form: name: str = element["name"] @@ -915,10 +904,10 @@ def queryables_by_form( def queryables_by_values( self, - available_values: Dict[str, List[str]], - required_keywords: List[str], - defaults: Dict[str, Any], - ) -> Dict[str, Annotated[Any, FieldInfo]]: + available_values: dict[str, list[str]], + required_keywords: list[str], + defaults: dict[str, Any], + ) -> dict[str, Annotated[Any, FieldInfo]]: """ Generate Annotated field definitions from available values. Used by ECMWF data providers like dedt_lumi. @@ -932,7 +921,7 @@ def queryables_by_values( # Needed to map constraints like "xxxx" to eodag parameter "ecmwf:xxxx" required = [ecmwf_format(k) for k in required_keywords] - queryables: Dict[str, Annotated[Any, FieldInfo]] = {} + queryables: dict[str, Annotated[Any, FieldInfo]] = {} for name, values in available_values.items(): # Rename keywords from form with metadata mapping. # Needed to map constraints like "xxxx" to eodag parameter "ecmwf:xxxx" @@ -953,8 +942,8 @@ def queryables_by_values( return queryables def format_as_provider_keyword( - self, product_type: str, properties: Dict[str, Any] - ) -> Dict[str, Any]: + self, product_type: str, properties: dict[str, Any] + ) -> dict[str, Any]: """Return provider equivalent keyword names from EODAG keywords. 
:param product_type: product type id @@ -992,7 +981,7 @@ def _fetch_data(self, url: str) -> Any: def normalize_results( self, results: RawSearchResult, **kwargs: Any - ) -> List[EOProduct]: + ) -> list[EOProduct]: """Build :class:`~eodag.api.product._product.EOProduct` from provider result :param results: Raw provider result as single dict in list @@ -1164,7 +1153,7 @@ def collect_search_urls( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[str], int]: + ) -> tuple[list[str], int]: """Wraps PostJsonSearch.collect_search_urls to force product count to 1 :param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information for the search @@ -1176,7 +1165,7 @@ def collect_search_urls( def do_search( self, prep: PreparedSearch = PreparedSearch(items_per_page=None), **kwargs: Any - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: """Perform the actual search request, and return result in a single element. :param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information for the search @@ -1196,7 +1185,7 @@ def do_search( def build_query_string( self, product_type: str, **kwargs: Any - ) -> Tuple[Dict[str, Any], str]: + ) -> tuple[dict[str, Any], str]: """Build The query string using the search parameters :param product_type: product type id @@ -1235,7 +1224,7 @@ class WekeoECMWFSearch(ECMWFSearch): def normalize_results( self, results: RawSearchResult, **kwargs: Any - ) -> List[EOProduct]: + ) -> list[EOProduct]: """Build :class:`~eodag.api.product._product.EOProduct` from provider result :param results: Raw provider result as single dict in list @@ -1261,7 +1250,7 @@ def normalize_results( return normalized - def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]: + def do_search(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: """Should perform the actual search request. 
:param args: arguments to be used in the search @@ -1272,7 +1261,7 @@ def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]: def build_query_string( self, product_type: str, **kwargs: Any - ) -> Tuple[Dict[str, Any], str]: + ) -> tuple[dict[str, Any], str]: """Build The query string using the search parameters :param product_type: product type id diff --git a/eodag/plugins/search/cop_marine.py b/eodag/plugins/search/cop_marine.py index a1000ff28..57bb63908 100644 --- a/eodag/plugins/search/cop_marine.py +++ b/eodag/plugins/search/cop_marine.py @@ -22,7 +22,7 @@ import os import re from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Optional, cast from urllib.parse import urlsplit import boto3 @@ -69,8 +69,8 @@ def _get_date_from_yyyymmdd(date_str: str, item_key: str) -> Optional[datetime]: def _get_dates_from_dataset_data( - dataset_item: Dict[str, Any] -) -> Optional[Dict[str, str]]: + dataset_item: dict[str, Any] +) -> Optional[dict[str, str]]: dates = {} if "start_datetime" in dataset_item["properties"]: dates["start"] = dataset_item["properties"]["start_datetime"] @@ -96,7 +96,7 @@ def _get_s3_client(endpoint_url: str) -> S3Client: ) -def _check_int_values_properties(properties: Dict[str, Any]): +def _check_int_values_properties(properties: dict[str, Any]): # remove int values with a bit length of more than 64 from the properties invalid = [] for prop, prop_value in properties.items(): @@ -134,7 +134,7 @@ def __init__(self, provider: str, config: PluginConfig): def _get_product_type_info( self, product_type: str - ) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: + ) -> tuple[dict[str, Any], list[dict[str, Any]]]: """Fetch product type and associated datasets info""" fetch_url = cast(str, self.config.discover_product_types["fetch_url"]).format( @@ -183,8 +183,8 @@ def _get_product_by_id( product_id: str, s3_url: str, product_type: str, - dataset_item: Dict[str, Any], - collection_dict: Dict[str, Any], + dataset_item: dict[str, Any], + collection_dict: dict[str, Any], ): # try to find date(s) in product id item_dates = re.findall(r"(\d{4})(0[1-9]|1[0-2])([0-3]\d)", product_id) @@ -208,8 +208,8 @@ def _create_product( product_type: str, item_key: str, s3_url: str, - dataset_item: Dict[str, Any], - collection_dict: Dict[str, Any], + dataset_item: dict[str, Any], + collection_dict: dict[str, Any], use_dataset_dates: bool = False, ) -> Optional[EOProduct]: @@ -288,7 +288,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """ Implementation of search for the Copernicus Marine provider :param prep: object containing search parameterds @@ -308,7 +308,7 @@ def query( "parameter product type is required for search with cop_marine provider" ) collection_dict, datasets_items_list = self._get_product_type_info(product_type) - products: List[EOProduct] = [] + products: list[EOProduct] = [] start_index = items_per_page * (page - 1) + 1 num_total = 0 for i, dataset_item in enumerate(datasets_items_list): diff --git a/eodag/plugins/search/creodias_s3.py b/eodag/plugins/search/creodias_s3.py index f6d471a6d..656a9d766 100644 --- a/eodag/plugins/search/creodias_s3.py +++ b/eodag/plugins/search/creodias_s3.py @@ -17,7 +17,7 @@ # limitations under the License. 
import logging from types import MethodType -from typing import Any, List +from typing import Any import boto3 import botocore @@ -139,7 +139,7 @@ def __init__(self, provider, config): def normalize_results( self, results: RawSearchResult, **kwargs: Any - ) -> List[EOProduct]: + ) -> list[EOProduct]: """Build EOProducts from provider results""" products = super(CreodiasS3Search, self).normalize_results(results, **kwargs) diff --git a/eodag/plugins/search/csw.py b/eodag/plugins/search/csw.py index 996519825..9c54e57be 100644 --- a/eodag/plugins/search/csw.py +++ b/eodag/plugins/search/csw.py @@ -19,7 +19,7 @@ import logging import re -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Optional, Union import pyproj from owslib.csw import CatalogueServiceWeb @@ -60,13 +60,13 @@ class CSWSearch(Search): * :attr:`~eodag.config.PluginConfig.api_endpoint` (``str``) (**mandatory**): The endpoint of the provider's search interface * :attr:`~eodag.config.PluginConfig.version` (``str``): OGC Catalogue Service version; default: ``2.0.2`` - * :attr:`~eodag.config.PluginConfig.search_definition` (``Dict[str, Any]``) (**mandatory**): + * :attr:`~eodag.config.PluginConfig.search_definition` (``dict[str, Any]``) (**mandatory**): - * **product_type_tags** (``List[Dict[str, Any]``): dict of product type tags + * **product_type_tags** (``list[dict[str, Any]``): dict of product type tags * **resource_location_filter** (``str``): regex string - * **date_tags** (``Dict[str, Any]``): tags for start and end + * **date_tags** (``dict[str, Any]``): tags for start and end - * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``Dict[str, Any]``): The search plugins of this kind can + * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``dict[str, Any]``): The search plugins of this kind can detect when a metadata mapping is "query-able", and get the semantics of how to format the query string parameter that enables to make a query on the corresponding metadata. To make a metadata query-able, just configure it in the metadata mapping to be a list of 2 items, the first one being the @@ -107,7 +107,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """Perform a search on a OGC/CSW-like interface""" product_type = kwargs.get("productType") if product_type is None: @@ -117,7 +117,7 @@ def query( self.__init_catalog(**getattr(auth.config, "credentials", {})) else: self.__init_catalog() - results: List[EOProduct] = [] + results: list[EOProduct] = [] if self.catalog: provider_product_type = self.config.products[product_type]["productType"] for product_type_def in self.config.search_definition["product_type_tags"]: @@ -229,12 +229,12 @@ def __build_product(self, rec: Any, product_type: str, **kwargs: Any) -> EOProdu def __convert_query_params( self, - product_type_def: Dict[str, Any], + product_type_def: dict[str, Any], product_type: str, - params: Dict[str, Any], - ) -> Union[List[OgcExpression], List[List[OgcExpression]]]: + params: dict[str, Any], + ) -> Union[list[OgcExpression], list[list[OgcExpression]]]: """Translates eodag search to CSW constraints using owslib constraint classes""" - constraints: List[OgcExpression] = [] + constraints: list[OgcExpression] = [] # How the match should be performed (fuzzy, prefix, postfix or exact). 
# defaults to fuzzy pt_tag, matching = ( diff --git a/eodag/plugins/search/data_request_search.py b/eodag/plugins/search/data_request_search.py index 97f97a118..a57ad0b25 100644 --- a/eodag/plugins/search/data_request_search.py +++ b/eodag/plugins/search/data_request_search.py @@ -20,7 +20,7 @@ import logging import time from datetime import datetime, timedelta, timezone -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Optional, cast import requests @@ -113,10 +113,10 @@ class DataRequestSearch(Search): * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.generic_product_type_id` (``str``): mapping for the product type id * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.generic_product_type_parsable_metadata` - (``Dict[str, str]``): mapping for product type metadata (e.g. ``abstract``, ``licence``) which can be parsed + (``dict[str, str]``): mapping for product type metadata (e.g. ``abstract``, ``licence``) which can be parsed from the provider result * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.generic_product_type_parsable_properties` - (``Dict[str, str]``): mapping for product type properties which can be parsed from the result and are not + (``dict[str, str]``): mapping for product type properties which can be parsed from the result and are not product type metadata * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.single_collection_fetch_url` (``str``): url to fetch data for a single collection; used if product type metadata is not available from the endpoint given in @@ -125,7 +125,7 @@ class DataRequestSearch(Search): to be added to the :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.fetch_url` to filter for a collection * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.single_product_type_parsable_metadata` - (``Dict[str, str]``): mapping for product type metadata returned by the endpoint given in + (``dict[str, str]``): mapping for product type metadata returned by the endpoint given in :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.single_collection_fetch_url`. * :attr:`~eodag.config.PluginConfig.constraints_file_url` (``str``): url to fetch the constraints for a specific @@ -133,7 +133,7 @@ class DataRequestSearch(Search): * :attr:`~eodag.config.PluginConfig.constraints_entry` (``str``): key in the json result where the constraints can be found; if not given, it is assumed that the constraints are on top level of the result, i.e. the result is an array of constraints - * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``Dict[str, Any]``): The search plugins of this kind can + * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``dict[str, Any]``): The search plugins of this kind can detect when a metadata mapping is "query-able", and get the semantics of how to format the query string parameter that enables to make a query on the corresponding metadata. 
To make a metadata query-able, just configure it in the metadata mapping to be a list of 2 items, the first one being the @@ -207,10 +207,10 @@ def __init__(self, provider: str, config: PluginConfig) -> None: self.config.pagination["next_page_url_key_path"] = string_to_jsonpath( self.config.pagination.get("next_page_url_key_path", None) ) - self.download_info: Dict[str, Any] = {} + self.download_info: dict[str, Any] = {} self.data_request_id = None - def discover_product_types(self, **kwargs: Any) -> Optional[Dict[str, Any]]: + def discover_product_types(self, **kwargs: Any) -> Optional[dict[str, Any]]: """Fetch product types is disabled for `DataRequestSearch` :returns: empty dict @@ -226,7 +226,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """ performs the search for a provider where several steps are required to fetch the data """ @@ -431,7 +431,7 @@ def _check_request_status(self, data_request_id: str) -> bool: def _get_result_data( self, data_request_id: str, items_per_page: int, page: int - ) -> Dict[str, Any]: + ) -> dict[str, Any]: page = page - 1 + self.config.pagination.get("start_page", 1) url = self.config.result_url.format( jobId=data_request_id, items_per_page=items_per_page, page=page @@ -450,18 +450,18 @@ def _get_result_data( def _convert_result_data( self, - result_data: Dict[str, Any], + result_data: dict[str, Any], data_request_id: str, product_type: str, **kwargs: Any, - ) -> Tuple[List[EOProduct], int]: + ) -> tuple[list[EOProduct], int]: """Build EOProducts from provider results""" results_entry = self.config.results_entry results = result_data[results_entry] logger.debug( "Adapting %s plugin results to eodag product representation" % len(results) ) - products: List[EOProduct] = [] + products: list[EOProduct] = [] for result in results: product = EOProduct( self.provider, @@ -517,8 +517,8 @@ def _check_uses_custom_filters(self, product_type: str) -> bool: return False def _apply_additional_filters( - self, result: Dict[str, Any], custom_filters: Dict[str, str] - ) -> Dict[str, Any]: + self, result: dict[str, Any], custom_filters: dict[str, str] + ) -> dict[str, Any]: filtered_result = [] results_entry = self.config.results_entry results = result[results_entry] diff --git a/eodag/plugins/search/qssearch.py b/eodag/plugins/search/qssearch.py index 76bd77b09..29e605b9a 100644 --- a/eodag/plugins/search/qssearch.py +++ b/eodag/plugins/search/qssearch.py @@ -26,12 +26,8 @@ Annotated, Any, Callable, - Dict, - List, Optional, Sequence, - Set, - Tuple, TypedDict, cast, get_args, @@ -128,7 +124,7 @@ class QueryStringSearch(Search): authentication error; only used if ``need_auth=true`` * :attr:`~eodag.config.PluginConfig.ssl_verify` (``bool``): if the ssl certificates should be verified in requests; default: ``True`` - * :attr:`~eodag.config.PluginConfig.dont_quote` (``List[str]``): characters that should not be quoted in the + * :attr:`~eodag.config.PluginConfig.dont_quote` (``list[str]``): characters that should not be quoted in the url params * :attr:`~eodag.config.PluginConfig.timeout` (``int``): time to wait until request timeout in seconds; default: ``5`` @@ -136,10 +132,10 @@ class QueryStringSearch(Search): total number of retries to allow; default: ``3`` * :attr:`~eodag.config.PluginConfig.retry_backoff_factor` (``int``): :class:`urllib3.util.Retry` ``backoff_factor`` parameter, backoff factor to apply between attempts after the second try; 
default: ``2`` - * :attr:`~eodag.config.PluginConfig.retry_status_forcelist` (``List[int]``): :class:`urllib3.util.Retry` + * :attr:`~eodag.config.PluginConfig.retry_status_forcelist` (``list[int]``): :class:`urllib3.util.Retry` ``status_forcelist`` parameter, list of integer HTTP status codes that we should force a retry on; default: ``[401, 429, 500, 502, 503, 504]`` - * :attr:`~eodag.config.PluginConfig.literal_search_params` (``Dict[str, str]``): A mapping of (search_param => + * :attr:`~eodag.config.PluginConfig.literal_search_params` (``dict[str, str]``): A mapping of (search_param => search_value) pairs giving search parameters to be passed as is in the search url query string. This is useful for example in situations where the user wants to add a fixed search query parameter exactly as it is done on the provider interface. @@ -183,13 +179,13 @@ class QueryStringSearch(Search): * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.generic_product_type_id` (``str``): mapping for the product type id * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.generic_product_type_parsable_metadata` - (``Dict[str, str]``): mapping for product type metadata (e.g. ``abstract``, ``licence``) which can be parsed + (``dict[str, str]``): mapping for product type metadata (e.g. ``abstract``, ``licence``) which can be parsed from the provider result * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.generic_product_type_parsable_properties` - (``Dict[str, str]``): mapping for product type properties which can be parsed from the result and are not + (``dict[str, str]``): mapping for product type properties which can be parsed from the result and are not product type metadata * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.generic_product_type_unparsable_properties` - (``Dict[str, str]``): mapping for product type properties which cannot be parsed from the result and are not + (``dict[str, str]``): mapping for product type properties which cannot be parsed from the result and are not product type metadata * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.single_collection_fetch_url` (``str``): url to fetch data for a single collection; used if product type metadata is not available from the endpoint given in @@ -198,13 +194,13 @@ class QueryStringSearch(Search): to be added to the :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.fetch_url` to filter for a collection * :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.single_product_type_parsable_metadata` - (``Dict[str, str]``): mapping for product type metadata returned by the endpoint given in + (``dict[str, str]``): mapping for product type metadata returned by the endpoint given in :attr:`~eodag.config.PluginConfig.DiscoverProductTypes.single_collection_fetch_url`. * :attr:`~eodag.config.PluginConfig.sort` (:class:`~eodag.config.PluginConfig.Sort`): configuration for sorting the results. It contains the keys: - * :attr:`~eodag.config.PluginConfig.Sort.sort_by_default` (``List[Tuple(str, Literal["ASC", "DESC"])]``): + * :attr:`~eodag.config.PluginConfig.Sort.sort_by_default` (``list[Tuple(str, Literal["ASC", "DESC"])]``): parameter and sort order by which the result will be sorted by default (if the user does not enter a ``sort_by`` parameter); if not given the result will use the default sorting of the provider; Attention: for some providers sorting might cause a timeout if no filters are used. 
In that case no default @@ -220,12 +216,12 @@ class QueryStringSearch(Search): * :attr:`~eodag.config.PluginConfig.Sort.sort_param_mapping` (``Dict [str, str]``): mapping for the parameters available for sorting * :attr:`~eodag.config.PluginConfig.Sort.sort_order_mapping` - (``Dict[Literal["ascending", "descending"], str]``): mapping for the sort order + (``dict[Literal["ascending", "descending"], str]``): mapping for the sort order * :attr:`~eodag.config.PluginConfig.Sort.max_sort_params` (``int``): maximum number of sort parameters supported by the provider; used to validate the user input to avoid failed requests or unexpected behaviour (not all parameters are used in the request) - * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``Dict[str, Any]``): The search plugins of this kind can + * :attr:`~eodag.config.PluginConfig.metadata_mapping` (``dict[str, Any]``): The search plugins of this kind can detect when a metadata mapping is "query-able", and get the semantics of how to format the query string parameter that enables to make a query on the corresponding metadata. To make a metadata query-able, just configure it in the metadata mapping to be a list of 2 items, the first one being the @@ -258,7 +254,7 @@ class QueryStringSearch(Search): metadata is activated; default: ``False``; if false, the other parameters are not used; * :attr:`~eodag.config.PluginConfig.DiscoverMetadata.metadata_pattern` (``str``): regex string a parameter in the result should match so that is used - * :attr:`~eodag.config.PluginConfig.DiscoverMetadata.search_param` (``Union [str, Dict[str, Any]]``): format + * :attr:`~eodag.config.PluginConfig.DiscoverMetadata.search_param` (``Union [str, dict[str, Any]]``): format to add a query param given by the user and not in the metadata mapping to the requests, 'metadata' will be replaced by the search param; can be a string or a dict containing :attr:`~eodag.config.PluginConfig.free_text_search_operations` @@ -286,7 +282,7 @@ class QueryStringSearch(Search): the result is an array of constraints """ - extract_properties: Dict[str, Callable[..., Dict[str, Any]]] = { + extract_properties: dict[str, Callable[..., dict[str, Any]]] = { "xml": properties_from_xml, "json": properties_from_json, } @@ -297,8 +293,8 @@ def __init__(self, provider: str, config: PluginConfig) -> None: self.config.__dict__.setdefault("results_entry", "features") self.config.__dict__.setdefault("pagination", {}) self.config.__dict__.setdefault("free_text_search_operations", {}) - self.search_urls: List[str] = [] - self.query_params: Dict[str, str] = dict() + self.search_urls: list[str] = [] + self.query_params: dict[str, str] = dict() self.query_string = "" self.next_page_url = None self.next_page_query_obj = None @@ -443,7 +439,7 @@ def clear(self) -> None: self.next_page_query_obj = None self.next_page_merge = None - def discover_product_types(self, **kwargs: Any) -> Optional[Dict[str, Any]]: + def discover_product_types(self, **kwargs: Any) -> Optional[dict[str, Any]]: """Fetch product types list from provider using `discover_product_types` conf :returns: configuration dict containing fetched product types information @@ -460,7 +456,7 @@ def discover_product_types(self, **kwargs: Any) -> Optional[Dict[str, Any]]: # no pagination return self.discover_product_types_per_page(**kwargs) - conf_update_dict: Dict[str, Any] = { + conf_update_dict: dict[str, Any] = { "providers_config": {}, "product_types_config": {}, } @@ -493,7 +489,7 @@ def discover_product_types(self, **kwargs: Any) -> 
Optional[Dict[str, Any]]: def discover_product_types_per_page( self, **kwargs: Any - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: """Fetch product types list from provider using `discover_product_types` conf using paginated ``kwargs["fetch_url"]`` @@ -551,7 +547,7 @@ def discover_product_types_per_page( return None else: try: - conf_update_dict: Dict[str, Any] = { + conf_update_dict: dict[str, Any] = { "providers_config": {}, "product_types_config": {}, } @@ -570,7 +566,7 @@ def discover_product_types_per_page( result = result[0] def conf_update_from_product_type_result( - product_type_result: Dict[str, Any] + product_type_result: dict[str, Any] ) -> None: """Update ``conf_update_dict`` using given product type json response""" # providers_config extraction @@ -698,7 +694,7 @@ def conf_update_from_product_type_result( def _get_product_type_metadata_from_single_collection_endpoint( self, product_type: str - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """ retrieves additional product type information from an endpoint returning data for a single collection :param product_type: product type @@ -726,7 +722,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """Perform a search on an OpenSearch-like interface :param prep: Object collecting needed information for search. @@ -806,14 +802,14 @@ def query( reason="Simply run `self.config.metadata_mapping.update(metadata_mapping)` instead", version="2.10.0", ) - def update_metadata_mapping(self, metadata_mapping: Dict[str, Any]) -> None: + def update_metadata_mapping(self, metadata_mapping: dict[str, Any]) -> None: """Update plugin metadata_mapping with input metadata_mapping configuration""" if self.config.metadata_mapping: self.config.metadata_mapping.update(metadata_mapping) def build_query_string( self, product_type: str, **kwargs: Any - ) -> Tuple[Dict[str, Any], str]: + ) -> tuple[dict[str, Any], str]: """Build The query string using the search parameters""" logger.debug("Building the query string that will be used for search") query_params = format_query_params(product_type, self.config, kwargs) @@ -832,7 +828,7 @@ def collect_search_urls( self, prep: PreparedSearch = PreparedSearch(page=None, items_per_page=None), **kwargs: Any, - ) -> Tuple[List[str], Optional[int]]: + ) -> tuple[list[str], Optional[int]]: """Build paginated urls""" page = prep.page items_per_page = prep.items_per_page @@ -901,7 +897,7 @@ def collect_search_urls( def do_search( self, prep: PreparedSearch = PreparedSearch(items_per_page=None), **kwargs: Any - ) -> List[Any]: + ) -> list[Any]: """Perform the actual search request. 
If there is a specified number of items per page, return the results as soon @@ -918,7 +914,7 @@ def do_search( "total_items_nb_key_path" ] - results: List[Any] = [] + results: list[Any] = [] for search_url in prep.search_urls: single_search_prep = copy_copy(prep) single_search_prep.url = search_url @@ -1069,14 +1065,14 @@ def do_search( def normalize_results( self, results: RawSearchResult, **kwargs: Any - ) -> List[EOProduct]: + ) -> list[EOProduct]: """Build EOProducts from provider results""" normalize_remaining_count = len(results) logger.debug( "Adapting %s plugin results to eodag product representation" % normalize_remaining_count ) - products: List[EOProduct] = [] + products: list[EOProduct] = [] for result in results: product = EOProduct( self.provider, @@ -1134,7 +1130,7 @@ def count_hits(self, count_url: str, result_type: Optional[str] = "json") -> int total_results = int(count_results) return total_results - def get_collections(self, prep: PreparedSearch, **kwargs: Any) -> Tuple[str, ...]: + def get_collections(self, prep: PreparedSearch, **kwargs: Any) -> tuple[str, ...]: """Get the collection to which the product belongs""" # See https://earth.esa.int/web/sentinel/missions/sentinel-2/news/- # /asset_publisher/Ac0d/content/change-of @@ -1145,7 +1141,7 @@ def get_collections(self, prep: PreparedSearch, **kwargs: Any) -> Tuple[str, ... not hasattr(prep, "product_type_def_params") or not prep.product_type_def_params ): - collections: Set[str] = set() + collections: set[str] = set() collection = getattr(self.config, "collection", None) if collection is None: try: @@ -1200,7 +1196,7 @@ def _request( ssl_ctx = get_ssl_context(ssl_verify) # auth if needed - kwargs: Dict[str, Any] = {} + kwargs: dict[str, Any] = {} if ( getattr(self.config, "need_auth", False) and hasattr(prep, "auth") @@ -1331,7 +1327,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: def do_search( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any - ) -> List[Any]: + ) -> list[Any]: """A two step search can be performed if the metadata are not given into the search result""" if getattr(self.config, "per_product_metadata_query", False): @@ -1366,7 +1362,7 @@ def do_search( else: return super(ODataV4Search, self).do_search(prep, **kwargs) - def get_metadata_search_url(self, entity: Dict[str, Any]) -> str: + def get_metadata_search_url(self, entity: dict[str, Any]) -> str: """Build the metadata link for the given entity""" return "{}({})/Metadata".format( self.config.api_endpoint.rstrip("/"), entity["id"] @@ -1374,7 +1370,7 @@ def get_metadata_search_url(self, entity: Dict[str, Any]) -> str: def normalize_results( self, results: RawSearchResult, **kwargs: Any - ) -> List[EOProduct]: + ) -> list[EOProduct]: """Build EOProducts from provider results If configured, a metadata pre-mapping can be applied to simplify further metadata extraction. 
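The discover_queryables hunks above convert provider JSON into pydantic field_definitions and return dict[str, Annotated[Any, FieldInfo]] mappings. A rough, self-contained illustration of that pattern follows; the parameter names and defaults are invented and this is not eodag's actual model-building code:

    from typing import Annotated, Optional

    from pydantic import Field, create_model

    # Invented field definitions: each pairs an Annotated type (carrying a
    # FieldInfo) with a default value, which is what create_model expects.
    field_definitions = {
        "productType": (Annotated[str, Field(description="EODAG product type")], ...),
        "cloudCover": (Annotated[Optional[int], Field(ge=0, le=100)], None),
    }

    Queryables = create_model("Queryables", **field_definitions)
    print(Queryables(productType="S2_MSI_L1C").model_dump())
    # -> {'productType': 'S2_MSI_L1C', 'cloudCover': None}

The Annotated entries carry validation metadata (descriptions, bounds, defaults) alongside the type, so the same dict can serve both the Python API and the STAC queryables endpoint.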
@@ -1431,7 +1427,7 @@ class PostJsonSearch(QueryStringSearch): """ def _get_default_end_date_from_start_date( - self, start_datetime: str, product_type_conf: Dict[str, Any] + self, start_datetime: str, product_type_conf: dict[str, Any] ) -> str: try: start_date = datetime.fromisoformat(start_datetime) @@ -1449,7 +1445,7 @@ def _get_default_end_date_from_start_date( return self.get_product_type_cfg_value("missionEndDate", today().isoformat()) def _check_date_params( - self, keywords: Dict[str, Any], product_type: Optional[str] + self, keywords: dict[str, Any], product_type: Optional[str] ) -> None: """checks if start and end date are present in the keywords and adds them if not""" if ( @@ -1510,7 +1506,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """Perform a search on an OpenSearch-like interface""" product_type = kwargs.get("productType", "") count = prep.count @@ -1641,7 +1637,7 @@ def _request(self, *x, **y): def normalize_results( self, results: RawSearchResult, **kwargs: Any - ) -> List[EOProduct]: + ) -> list[EOProduct]: """Build EOProducts from provider results""" normalized = super().normalize_results(results, **kwargs) for product in normalized: @@ -1676,12 +1672,12 @@ def collect_search_urls( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[str], Optional[int]]: + ) -> tuple[list[str], Optional[int]]: """Adds pagination to query parameters, and auth to url""" page = prep.page items_per_page = prep.items_per_page count = prep.count - urls: List[str] = [] + urls: list[str] = [] total_results = 0 if count else None if "count_endpoint" not in self.config.pagination: @@ -1842,7 +1838,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: def build_query_string( self, product_type: str, **kwargs: Any - ) -> Tuple[Dict[str, Any], str]: + ) -> tuple[dict[str, Any], str]: """Build The query string using the search parameters""" logger.debug("Building the query string that will be used for search") @@ -1868,7 +1864,7 @@ def quote_via(x: Any, *_args, **_kwargs) -> str: def discover_queryables( self, **kwargs: Any - ) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]: + ) -> Optional[dict[str, Annotated[Any, FieldInfo]]]: """Fetch queryables list from provider using `discover_queryables` conf :param kwargs: additional filters for queryables (`productType` and other search @@ -1964,7 +1960,7 @@ def discover_queryables( return None # convert json results to pydantic model fields - field_definitions: Dict[str, Any] = dict() + field_definitions: dict[str, Any] = dict() for json_param, json_mtd in json_queryables.items(): param = ( get_queryable_from_provider( @@ -1998,6 +1994,6 @@ def __init__(self, provider: str, config: PluginConfig) -> None: def build_query_string( self, product_type: str, **kwargs: Any - ) -> Tuple[Dict[str, Any], str]: + ) -> tuple[dict[str, Any], str]: """Build The query string using the search parameters""" return PostJsonSearch.build_query_string(self, product_type, **kwargs) diff --git a/eodag/plugins/search/static_stac_search.py b/eodag/plugins/search/static_stac_search.py index 639381d00..8da514c54 100644 --- a/eodag/plugins/search/static_stac_search.py +++ b/eodag/plugins/search/static_stac_search.py @@ -18,7 +18,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Optional from unittest 
import mock import geojson @@ -90,7 +90,7 @@ def __init__(self, provider: str, config: PluginConfig) -> None: ): self.config.discover_product_types = {} - def discover_product_types(self, **kwargs: Any) -> Optional[Dict[str, Any]]: + def discover_product_types(self, **kwargs: Any) -> Optional[dict[str, Any]]: """Fetch product types list from a static STAC Catalog provider using `discover_product_types` conf :returns: configuration dict containing fetched product types information @@ -127,7 +127,7 @@ def query( self, prep: PreparedSearch = PreparedSearch(), **kwargs: Any, - ) -> Tuple[List[EOProduct], Optional[int]]: + ) -> tuple[list[EOProduct], Optional[int]]: """Perform a search on a static STAC Catalog""" # only return 1 page if pagination is disabled diff --git a/eodag/rest/cache.py b/eodag/rest/cache.py index 2b98a0ed8..35ea1eea7 100644 --- a/eodag/rest/cache.py +++ b/eodag/rest/cache.py @@ -16,7 +16,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import Any, Callable, Coroutine, Dict, TypeVar, cast +from typing import Any, Callable, Coroutine, TypeVar, cast import orjson from cachetools import LRUCache @@ -48,7 +48,7 @@ async def cached( host_cache_key = f"{cache_key}:{host}" try: - c: Dict[str, Any] = request.app.state.cache + c: dict[str, Any] = request.app.state.cache if cached := c.get(host_cache_key): logger.debug("Cache result hit") diff --git a/eodag/rest/config.py b/eodag/rest/config.py index 0827aa45c..3d5218dc1 100644 --- a/eodag/rest/config.py +++ b/eodag/rest/config.py @@ -18,7 +18,7 @@ from __future__ import annotations from functools import lru_cache -from typing import Annotated, List, Union +from typing import Annotated, Union from pydantic import Field from pydantic.functional_validators import BeforeValidator @@ -28,7 +28,7 @@ from eodag.rest.constants import DEFAULT_MAXSIZE, DEFAULT_TTL -def str2liststr(raw: Union[str, List[str]]) -> List[str]: +def str2liststr(raw: Union[str, list[str]]) -> list[str]: """Convert str to list[str]""" if isinstance(raw, list): return raw @@ -49,7 +49,7 @@ class Settings(BaseSettings): stac_api_landing_id: str = "eodag-stac-api" origin_url_blacklist: Annotated[ - Union[str, List[str]], + Union[str, list[str]], BeforeValidator(str2liststr), Doc( "Hide from clients items assets' alternative URLs starting with URLs from the list" diff --git a/eodag/rest/core.py b/eodag/rest/core.py index ece8a0d31..347d9e228 100644 --- a/eodag/rest/core.py +++ b/eodag/rest/core.py @@ -77,7 +77,7 @@ ) if TYPE_CHECKING: - from typing import Any, Dict, List, Optional, Union + from typing import Any, Optional, Union from fastapi import Request from requests.auth import AuthBase @@ -116,12 +116,12 @@ def get_home_page_content(base_url: str, ipp: Optional[int] = None) -> str: reason="Function internally used by get_home_page_content, also deprecated", version="2.6.1", ) -def format_product_types(product_types: List[Dict[str, Any]]) -> str: +def format_product_types(product_types: list[dict[str, Any]]) -> str: """Format product_types :param product_types: A list of EODAG product types as returned by the core api """ - result: List[str] = [] + result: list[str] = [] for pt in product_types: result.append(f'* *__{pt["ID"]}__*: {pt["abstract"]}') return "\n".join(sorted(result)) @@ -130,7 +130,7 @@ def format_product_types(product_types: List[Dict[str, Any]]) -> str: def search_stac_items( request: Request, search_request: SearchPostRequest, -) -> Dict[str, Any]: +) -> 
dict[str, Any]: """ Search and retrieve STAC items based on the given search request. @@ -309,8 +309,8 @@ def download_stac_item( def _order_and_update( product: EOProduct, - auth: Union[AuthBase, Dict[str, str], None], - query_args: Dict[str, Any], + auth: Union[AuthBase, dict[str, str], None], + query_args: dict[str, Any], ) -> None: """Order product if needed and update given kwargs with order-status-dict""" if product.properties.get("storageStatus") != ONLINE_STATUS and hasattr( @@ -353,7 +353,7 @@ def _order_and_update( @lru_cache(maxsize=1) -def get_detailled_collections_list() -> List[Dict[str, Any]]: +def get_detailled_collections_list() -> list[dict[str, Any]]: """Returns detailled collections / product_types list as a list of config dicts @@ -370,7 +370,7 @@ async def all_collections( instrument: Optional[str] = None, constellation: Optional[str] = None, datetime: Optional[str] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: """Build STAC collections :param url: Requested URL @@ -380,7 +380,7 @@ async def all_collections( :returns: Collections dictionary """ - async def _fetch() -> Dict[str, Any]: + async def _fetch() -> dict[str, Any]: stac_collection = StacCollection( url=request.state.url, stac_config=stac_config, @@ -422,7 +422,7 @@ async def _fetch() -> Dict[str, Any]: async def get_collection( request: Request, collection_id: str, provider: Optional[str] = None -) -> Dict[str, Any]: +) -> dict[str, Any]: """Build STAC collection by id :param url: Requested URL @@ -432,7 +432,7 @@ async def get_collection( :returns: Collection dictionary """ - async def _fetch() -> Dict[str, Any]: + async def _fetch() -> dict[str, Any]: stac_collection = StacCollection( url=request.state.url, stac_config=stac_config, @@ -455,7 +455,7 @@ async def get_stac_catalogs( request: Request, url: str, provider: Optional[str] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: """Build STAC catalog :param url: Requested URL @@ -464,7 +464,7 @@ async def get_stac_catalogs( :returns: Catalog dictionary """ - async def _fetch() -> Dict[str, Any]: + async def _fetch() -> dict[str, Any]: return StacCatalog( url=url, stac_config=stac_config, @@ -523,7 +523,7 @@ def time_interval_overlap(eodag_args: EODAGSearch, catalog: StacCatalog) -> bool @lru_cache(maxsize=1) -def get_stac_conformance() -> Dict[str, str]: +def get_stac_conformance() -> dict[str, str]: """Build STAC conformance :returns: conformance dictionary @@ -540,7 +540,7 @@ def get_stac_api_version() -> str: @lru_cache(maxsize=1) -def get_stac_extension_oseo(url: str) -> Dict[str, str]: +def get_stac_extension_oseo(url: str) -> dict[str, str]: """Build STAC OGC / OpenSearch Extension for EO :param url: Requested URL @@ -571,14 +571,14 @@ async def get_queryables( request: Request, params: QueryablesGetParams, provider: Optional[str] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: """Fetch the queryable properties for a collection. :param collection_id: The ID of the collection. :returns: A set containing the STAC standardized queryable properties for a collection. 
""" - async def _fetch() -> Dict[str, Any]: + async def _fetch() -> dict[str, Any]: python_queryables = eodag_api.list_queryables( provider=provider, fetch_providers=False, @@ -589,8 +589,8 @@ async def _fetch() -> Dict[str, Any]: by_alias=True ) - properties: Dict[str, Any] = python_queryables_json["properties"] - required: List[str] = python_queryables_json.get("required") or [] + properties: dict[str, Any] = python_queryables_json["properties"] + required: list[str] = python_queryables_json.get("required") or [] # productType is either simply removed or replaced by collection later. if "productType" in properties: @@ -598,7 +598,7 @@ async def _fetch() -> Dict[str, Any]: if "productType" in required: required.remove("productType") - stac_properties: Dict[str, Any] = {} + stac_properties: dict[str, Any] = {} # get stac default properties to set prefixes stac_item_properties = list(stac_config["item"]["properties"].values()) @@ -687,7 +687,7 @@ def crunch_products( f'Unknown crunch name. Use one of: {", ".join(crunchers.keys())}' ) - cruncher_config: Dict[str, Any] = {} + cruncher_config: dict[str, Any] = {} for config_param in cruncher.config_params: config_param_value = kwargs.get(config_param) if not config_param_value: @@ -720,13 +720,13 @@ def eodag_api_init() -> None: ext_col = StacCollection.ext_stac_collections.get(key) if not ext_col: continue - platform: Union[str, List[str]] = ext_col.get("summaries", {}).get( + platform: Union[str, list[str]] = ext_col.get("summaries", {}).get( "platform" ) - constellation: Union[str, List[str]] = ext_col.get("summaries", {}).get( + constellation: Union[str, list[str]] = ext_col.get("summaries", {}).get( "constellation" ) - processing_level: Union[str, List[str]] = ext_col.get("summaries", {}).get( + processing_level: Union[str, list[str]] = ext_col.get("summaries", {}).get( "processing:level" ) # Check if platform or constellation are lists and join them into a string if they are diff --git a/eodag/rest/errors.py b/eodag/rest/errors.py index 574a3e376..966f28422 100644 --- a/eodag/rest/errors.py +++ b/eodag/rest/errors.py @@ -16,7 +16,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -from typing import Dict, List, Tuple, Union +from typing import Union from fastapi import FastAPI, Request from fastapi.responses import ORJSONResponse @@ -56,16 +56,16 @@ class ResponseSearchError(Exception): """Represent a EODAG search error response""" - def __init__(self, errors: List[Tuple[str, Exception]]) -> None: + def __init__(self, errors: list[tuple[str, Exception]]) -> None: self._errors = errors @property - def errors(self) -> List[Dict[str, Union[str, int]]]: + def errors(self) -> list[dict[str, Union[str, int]]]: """return errors as a list of dict""" - error_list: List[Dict[str, Union[str, int]]] = [] + error_list: list[dict[str, Union[str, int]]] = [] for name, exception in self._errors: - error_dict: Dict[str, Union[str, int]] = { + error_dict: dict[str, Union[str, int]] = { "provider": name, "error": exception.__class__.__name__, } diff --git a/eodag/rest/server.py b/eodag/rest/server.py index 65b76ef95..f623a1ff6 100755 --- a/eodag/rest/server.py +++ b/eodag/rest/server.py @@ -23,15 +23,7 @@ from contextlib import asynccontextmanager from importlib.metadata import version from json import JSONDecodeError -from typing import ( - TYPE_CHECKING, - Any, - AsyncGenerator, - Awaitable, - Callable, - Dict, - Optional, -) +from typing import TYPE_CHECKING, Any, AsyncGenerator, Awaitable, Callable, Optional from fastapi import APIRouter as FastAPIRouter from fastapi import FastAPI, HTTPException, Request @@ -131,7 +123,7 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: include_in_schema=False, status_code=200, ) -async def liveness_probe(request: Request) -> Dict[str, bool]: +async def liveness_probe(request: Request) -> dict[str, bool]: "Endpoint meant to be used as liveness probe by deployment platforms" return {"success": True} @@ -139,7 +131,7 @@ async def liveness_probe(request: Request) -> Dict[str, bool]: @router.api_route( methods=["GET", "HEAD"], path="/api", tags=["Capabilities"], include_in_schema=False ) -async def eodag_openapi(request: Request) -> Dict[str, Any]: +async def eodag_openapi(request: Request) -> dict[str, Any]: """Customized openapi""" logger.debug("URL: /api") if app.openapi_schema: diff --git a/eodag/rest/stac.py b/eodag/rest/stac.py index 7f3674c4a..ad91b24e8 100644 --- a/eodag/rest/stac.py +++ b/eodag/rest/stac.py @@ -21,7 +21,7 @@ import os from collections import defaultdict from datetime import datetime, timezone -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Optional from urllib.parse import ( parse_qs, quote, @@ -120,7 +120,7 @@ class StacCommon: def __init__( self, url: str, - stac_config: Dict[str, Any], + stac_config: dict[str, Any], provider: Optional[str], eodag_api: EODataAccessGateway, root: str = "/", @@ -131,9 +131,9 @@ def __init__( self.eodag_api = eodag_api self.root = root.rstrip("/") if len(root) > 1 else root - self.data: Dict[str, Any] = {} + self.data: dict[str, Any] = {} - def update_data(self, data: Dict[str, Any]) -> None: + def update_data(self, data: dict[str, Any]) -> None: """Updates data using given input STAC dict data :param data: Catalog data (parsed STAC dict) @@ -167,8 +167,8 @@ def apply_method_ids(k, v): @staticmethod def get_stac_extension( - url: str, stac_config: Dict[str, Any], extension: str, **kwargs: Any - ) -> Dict[str, str]: + url: str, stac_config: dict[str, Any], extension: str, **kwargs: Any + ) -> dict[str, str]: """Parse STAC extension from config and return as dict :param url: Requested URL @@ -187,7 +187,7 @@ 
def get_stac_extension( } return format_dict_items(extension_model, **format_args) - def get_provider_dict(self, provider: str) -> Dict[str, Any]: + def get_provider_dict(self, provider: str) -> dict[str, Any]: """Generate STAC provider dict""" provider_config = next( p @@ -216,7 +216,7 @@ class StacItem(StacCommon): def __init__( self, url: str, - stac_config: Dict[str, Any], + stac_config: dict[str, Any], provider: Optional[str], eodag_api: EODataAccessGateway, root: str = "/", @@ -230,8 +230,8 @@ def __init__( ) def __get_item_list( - self, search_results: SearchResult, catalog: Dict[str, Any] - ) -> List[Dict[str, Any]]: + self, search_results: SearchResult, catalog: dict[str, Any] + ) -> list[dict[str, Any]]: """Build STAC items list from EODAG search results :param search_results: EODAG search results @@ -246,7 +246,7 @@ def __get_item_list( ) # check if some items need to be converted - need_conversion: Dict[str, Any] = {} + need_conversion: dict[str, Any] = {} for k, v in item_model["properties"].items(): if isinstance(v, str): conversion, item_model["properties"][k] = get_metadata_path( @@ -266,11 +266,11 @@ def __get_item_list( ] ignored_props = COLLECTION_PROPERTIES + item_props + IGNORED_ITEM_PROPERTIES - item_list: List[Dict[str, Any]] = [] + item_list: list[dict[str, Any]] = [] for product in search_results: product_dict = deepcopy(product.__dict__) - product_item: Dict[str, Any] = jsonpath_parse_dict_items( + product_item: dict[str, Any] = jsonpath_parse_dict_items( item_model, { "product": product_dict, @@ -372,10 +372,10 @@ def _get_assets( product: EOProduct, downloadlink_href: str, without_arg_url: str, - query_dict: Optional[Dict[str, Any]] = None, + query_dict: Optional[dict[str, Any]] = None, _dc_qs: Optional[str] = None, - ) -> Dict[str, Any]: - assets: Dict[str, Any] = {} + ) -> dict[str, Any]: + assets: dict[str, Any] = {} settings = Settings.from_environment() if _dc_qs: @@ -454,9 +454,9 @@ def get_stac_items( self, search_results: SearchResult, total: int, - catalog: Dict[str, Any], - next_link: Optional[Dict[str, Any]], - ) -> Dict[str, Any]: + catalog: dict[str, Any], + next_link: Optional[dict[str, Any]], + ) -> dict[str, Any]: """Build STAC items from EODAG search results :param search_results: EODAG search results @@ -505,8 +505,8 @@ def get_stac_items( return self.data def __filter_item_model_properties( - self, item_model: Dict[str, Any], product_type: str - ) -> Dict[str, Any]: + self, item_model: dict[str, Any], product_type: str + ) -> dict[str, Any]: """Filter item model depending on product type metadata and its extensions. Removes not needed parameters, and adds supplementary ones as part of oseo extension. 
@@ -570,13 +570,13 @@ def __filter_item_model_properties( return result_item_model - def __filter_item_properties_values(self, item: Dict[str, Any]) -> Dict[str, Any]: + def __filter_item_properties_values(self, item: dict[str, Any]) -> dict[str, Any]: """Removes empty properties, unused extensions, and add missing extensions :param item: STAC item data :returns: Filtered item model """ - all_extensions_dict: Dict[str, str] = deepcopy( + all_extensions_dict: dict[str, str] = deepcopy( self.stac_config["stac_extensions"] ) # parse f-strings with root @@ -601,7 +601,7 @@ def __filter_item_properties_values(self, item: Dict[str, Any]) -> Dict[str, Any return item - def get_stac_item_from_product(self, product: EOProduct) -> Dict[str, Any]: + def get_stac_item_from_product(self, product: EOProduct) -> dict[str, Any]: """Build STAC item from EODAG product :param product: EODAG product @@ -661,7 +661,7 @@ class StacCollection(StacCommon): """ # External STAC collections - ext_stac_collections: Dict[str, Dict[str, Any]] = dict() + ext_stac_collections: dict[str, dict[str, Any]] = dict() @classmethod def fetch_external_stac_collections(cls, eodag_api: EODataAccessGateway) -> None: @@ -690,7 +690,7 @@ def fetch_external_stac_collections(cls, eodag_api: EODataAccessGateway) -> None def __init__( self, url: str, - stac_config: Dict[str, Any], + stac_config: dict[str, Any], provider: Optional[str], eodag_api: EODataAccessGateway, root: str = "/", @@ -703,7 +703,7 @@ def __init__( root=root, ) - def __list_product_type_providers(self, product_type: Dict[str, Any]) -> List[str]: + def __list_product_type_providers(self, product_type: dict[str, Any]) -> list[str]: """Retrieve a list of providers for a given product type. :param product_type: Dictionary containing information about the product type. @@ -720,8 +720,8 @@ def __list_product_type_providers(self, product_type: Dict[str, Any]) -> List[st ] def __generate_stac_collection( - self, collection_model: Any, product_type: Dict[str, Any] - ) -> Dict[str, Any]: + self, collection_model: Any, product_type: dict[str, Any] + ) -> dict[str, Any]: """Generate a STAC collection dictionary for a given product type. :param collection_model: The base model for the STAC collection. 
@@ -730,7 +730,7 @@ def __generate_stac_collection( """ providers = self.__list_product_type_providers(product_type) - providers_dict: Dict[str, Dict[str, Any]] = {} + providers_dict: dict[str, dict[str, Any]] = {} for provider in providers: p_dict = self.get_provider_dict(provider) providers_dict.setdefault(p_dict["name"], p_dict) @@ -817,7 +817,7 @@ def get_collection_list( instrument: Optional[str] = None, constellation: Optional[str] = None, datetime: Optional[str] = None, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: """Build STAC collections list :param filters: (optional) Additional filters for collections search @@ -852,7 +852,7 @@ def get_collection_list( product_types = all_pt # list product types with all metadata using guessed ids - collection_list: List[Dict[str, Any]] = [] + collection_list: list[dict[str, Any]] = [] for product_type in product_types: stac_collection = self.__generate_stac_collection( collection_model, product_type @@ -876,7 +876,7 @@ class StacCatalog(StacCommon): def __init__( self, url: str, - stac_config: Dict[str, Any], + stac_config: dict[str, Any], provider: Optional[str], eodag_api: EODataAccessGateway, root: str = "/", @@ -892,8 +892,8 @@ def __init__( self.data = {} self.shp_location_config = eodag_api.locations_config - self.search_args: Dict[str, Any] = {} - self.children: List[Dict[str, Any]] = [] + self.search_args: dict[str, Any] = {} + self.children: list[dict[str, Any]] = [] self.catalog_config = deepcopy(stac_config["catalog"]) @@ -909,7 +909,7 @@ def __init__( # build catalog self.__build_stac_catalog(collection) - def __update_data_from_catalog_config(self, catalog_config: Dict[str, Any]) -> bool: + def __update_data_from_catalog_config(self, catalog_config: dict[str, Any]) -> bool: """Updates configuration and data using given input catalog config :param catalog_config: Catalog config, from yml stac_config[catalogs] @@ -961,7 +961,7 @@ def __build_stac_catalog(self, collection: Optional[str] = None) -> StacCatalog: def set_stac_product_type_by_id( self, product_type: str, **_: Any - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Updates catalog with given product_type :param product_type: Product type @@ -993,7 +993,7 @@ def set_stac_product_type_by_id( format_args["catalog"] = defaultdict(str, **self.data) format_args["collection"] = collections[0] try: - parsed_dict: Dict[str, Any] = format_dict_items(cat_model, **format_args) + parsed_dict: dict[str, Any] = format_dict_items(cat_model, **format_args) except Exception: logger.error("Could not format product_type catalog") raise diff --git a/eodag/rest/types/collections_search.py b/eodag/rest/types/collections_search.py index ddbc62228..a49a4b297 100644 --- a/eodag/rest/types/collections_search.py +++ b/eodag/rest/types/collections_search.py @@ -15,7 +15,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict, Optional +from typing import Any, Optional from pydantic import ( BaseModel, @@ -39,6 +39,6 @@ class CollectionsSearchRequest(BaseModel): constellation: Optional[str] = Field(default=None) @model_serializer(mode="wrap") - def _serialize(self, handler: SerializerFunctionWrapHandler) -> Dict[str, Any]: - dumped: Dict[str, Any] = handler(self) + def _serialize(self, handler: SerializerFunctionWrapHandler) -> dict[str, Any]: + dumped: dict[str, Any] = handler(self) return {EODAGSearch.to_eodag(k): v for k, v in dumped.items()} diff --git a/eodag/rest/types/eodag_search.py b/eodag/rest/types/eodag_search.py index 5bdae7643..11a4a6f3d 100644 --- a/eodag/rest/types/eodag_search.py +++ b/eodag/rest/types/eodag_search.py @@ -17,7 +17,7 @@ # limitations under the License. from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, Optional, Union, cast from pydantic import ( AliasChoices, @@ -52,7 +52,7 @@ from typing_extensions import Self Geometry = Union[ - Dict[str, Any], + dict[str, Any], Point, MultiPoint, LineString, @@ -73,8 +73,8 @@ class EODAGSearch(BaseModel): productType: Optional[str] = Field(None, alias="collections", validate_default=True) provider: Optional[str] = Field(None) - ids: Optional[List[str]] = Field(None) - id: Optional[List[str]] = Field( + ids: Optional[list[str]] = Field(None) + id: Optional[list[str]] = Field( None, alias="ids" ) # TODO: remove when updating queryables geom: Optional[Geometry] = Field(None, alias="geometry") @@ -101,7 +101,7 @@ class EODAGSearch(BaseModel): orbitNumber: Optional[int] = Field(None, alias="sat:absolute_orbit") # TODO: colision in property name. Need to handle "sar:product_type" sensorMode: Optional[str] = Field(None, alias="sar:instrument_mode") - polarizationChannels: Optional[List[str]] = Field(None, alias="sar:polarizations") + polarizationChannels: Optional[list[str]] = Field(None, alias="sar:polarizations") dopplerFrequency: Optional[str] = Field(None, alias="sar:frequency_band") doi: Optional[str] = Field(None, alias="sci:doi") illuminationElevationAngle: Optional[float] = Field( @@ -110,10 +110,10 @@ class EODAGSearch(BaseModel): illuminationAzimuthAngle: Optional[float] = Field(None, alias="view:sun_azimuth") page: Optional[int] = Field(1) items_per_page: int = Field(DEFAULT_ITEMS_PER_PAGE, alias="limit") - sort_by: Optional[List[Tuple[str, str]]] = Field(None, alias="sortby") + sort_by: Optional[list[tuple[str, str]]] = Field(None, alias="sortby") raise_errors: bool = False - _to_eodag_map: Dict[str, str] + _to_eodag_map: dict[str, str] @model_validator(mode="after") def remove_timeFromAscendingNode(self) -> Self: # pylint: disable=invalid-name @@ -129,7 +129,7 @@ def parse_extra_fields(self) -> Self: if not self.__pydantic_extra__: return self - keys_to_update: Dict[str, str] = {} + keys_to_update: dict[str, str] = {} for key in self.__pydantic_extra__.keys(): if key.startswith("unk:"): keys_to_update[key] = key[len("unk:") :] @@ -145,7 +145,7 @@ def parse_extra_fields(self) -> Self: @model_validator(mode="before") @classmethod - def remove_keys(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def remove_keys(cls, values: dict[str, Any]) -> dict[str, Any]: """Remove 'datetime', 'crunch', 'intersects', and 'bbox' keys""" for key in ["datetime", "crunch", "intersects", "bbox", "filter_lang"]: values.pop(key, None) @@ -154,8 +154,8 @@ def remove_keys(cls, values: Dict[str, Any]) -> Dict[str, Any]: 
@model_validator(mode="before") @classmethod def parse_collections( - cls, values: Dict[str, Any], info: ValidationInfo - ) -> Dict[str, Any]: + cls, values: dict[str, Any], info: ValidationInfo + ) -> dict[str, Any]: """convert collections to productType""" if collections := values.pop("collections", None): @@ -172,7 +172,7 @@ def parse_collections( @model_validator(mode="before") @classmethod - def parse_query(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def parse_query(cls, values: dict[str, Any]) -> dict[str, Any]: """ Convert a STAC query parameter filter with the "eq" operator to a dict. """ @@ -190,9 +190,9 @@ def add_error(error_message: str, input: Any) -> None: if not query: return values - query_props: Dict[str, Any] = {} - errors: List[InitErrorDetails] = [] - for property_name, conditions in cast(Dict[str, Any], query).items(): + query_props: dict[str, Any] = {} + errors: list[InitErrorDetails] = [] + for property_name, conditions in cast(dict[str, Any], query).items(): # Remove the prefix "properties." if present prop = property_name.replace("properties.", "", 1) @@ -205,7 +205,7 @@ def add_error(error_message: str, input: Any) -> None: continue # Retrieve the operator and its value - operator, value = next(iter(cast(Dict[str, Any], conditions).items())) + operator, value = next(iter(cast(dict[str, Any], conditions).items())) # Validate the operator # only eq, in and lte are allowed @@ -239,7 +239,7 @@ def add_error(error_message: str, input: Any) -> None: @model_validator(mode="before") @classmethod - def parse_cql(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def parse_cql(cls, values: dict[str, Any]) -> dict[str, Any]: """ Process cql2 filter """ @@ -256,7 +256,7 @@ def add_error(error_message: str) -> None: if not filter_: return values - errors: List[InitErrorDetails] = [] + errors: list[InitErrorDetails] = [] try: parsing_result = EodagEvaluator().evaluate(parse_json(filter_)) # type: ignore except (ValueError, NotImplementedError) as e: @@ -271,7 +271,7 @@ def add_error(error_message: str) -> None: title=cls.__name__, line_errors=errors ) - cql_args: Dict[str, Any] = cast(Dict[str, Any], parsing_result) + cql_args: dict[str, Any] = cast(dict[str, Any], parsing_result) invalid_keys = { "collections": 'Use "collection" instead of "collections"', @@ -298,7 +298,7 @@ def add_error(error_message: str) -> None: @field_validator("instrument", mode="before") @classmethod - def join_instruments(cls, v: Union[str, List[str]]) -> str: + def join_instruments(cls, v: Union[str, list[str]]) -> str: """convert instruments to instrument""" if isinstance(v, list): return ",".join(v) @@ -308,8 +308,8 @@ def join_instruments(cls, v: Union[str, List[str]]) -> str: @classmethod def parse_sortby( cls, - sortby_post_params: List[Dict[str, str]], - ) -> List[Tuple[str, str]]: + sortby_post_params: list[dict[str, str]], + ) -> list[tuple[str, str]]: """ Convert STAC POST sortby to EODAG sort_by """ @@ -363,7 +363,7 @@ def to_eodag(cls, value: str) -> str: def to_stac( cls, field_name: str, - stac_item_properties: Optional[List[str]] = None, + stac_item_properties: Optional[list[str]] = None, provider: Optional[str] = None, ) -> str: """Get the alias of a field in a Pydantic model""" diff --git a/eodag/rest/types/queryables.py b/eodag/rest/types/queryables.py index 9fca8b3c5..1f06dce84 100644 --- a/eodag/rest/types/queryables.py +++ b/eodag/rest/types/queryables.py @@ -17,7 +17,7 @@ # limitations under the License. 
from __future__ import annotations -from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Optional, Union from pydantic import ( BaseModel, @@ -46,8 +46,8 @@ class QueryablesGetParams(BaseModel): model_config = ConfigDict(extra="allow", frozen=True) @model_serializer(mode="wrap") - def _serialize(self, handler: SerializerFunctionWrapHandler) -> Dict[str, Any]: - dumped: Dict[str, Any] = handler(self) + def _serialize(self, handler: SerializerFunctionWrapHandler) -> dict[str, Any]: + dumped: dict[str, Any] = handler(self) return {EODAGSearch.to_eodag(k): v for k, v in dumped.items()} # use [prop-decorator] mypy error code when mypy==1.12 is released @@ -77,12 +77,12 @@ class StacQueryableProperty(BaseModel): description: str ref: Optional[str] = Field(default=None, serialization_alias="$ref") - type: Optional[Union[str, List[str]]] = None - enum: Optional[List[Any]] = None + type: Optional[Union[str, list[str]]] = None + enum: Optional[list[Any]] = None value: Optional[Any] = None - min: Optional[Union[int, List[Union[int, None]]]] = None - max: Optional[Union[int, List[Union[int, None]]]] = None - oneOf: Optional[List[Any]] = None + min: Optional[Union[int, list[Union[int, None]]]] = None + max: Optional[Union[int, list[Union[int, None]]]] = None + oneOf: Optional[list[Any]] = None items: Optional[Any] = None @classmethod @@ -104,7 +104,7 @@ def remove_none( _: SerializationInfo, ): """Remove none value property fields during serialization""" - props: Dict[str, Any] = handler(self) + props: dict[str, Any] = handler(self) return {k: v for k, v in props.items() if v is not None} @@ -130,13 +130,13 @@ class StacQueryables(BaseModel): description: str = Field( default="Queryable names for the EODAG STAC API Item Search filter." 
) - default_properties: ClassVar[Dict[str, StacQueryableProperty]] = { + default_properties: ClassVar[dict[str, StacQueryableProperty]] = { "collection": StacQueryableProperty( description="Collection", ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/collection", ) } - possible_properties: ClassVar[Dict[str, StacQueryableProperty]] = { + possible_properties: ClassVar[dict[str, StacQueryableProperty]] = { "geometry": StacQueryableProperty( description="Geometry", ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/geometry", @@ -152,8 +152,8 @@ class StacQueryables(BaseModel): items={"type": "number"}, ), } - properties: Dict[str, Any] = Field() - required: Optional[List[str]] = Field(None) + properties: dict[str, Any] = Field() + required: Optional[list[str]] = Field(None) additional_properties: bool = Field( default=True, serialization_alias="additionalProperties" ) diff --git a/eodag/rest/types/stac_search.py b/eodag/rest/types/stac_search.py index 5f151f0c2..1f7869cb0 100644 --- a/eodag/rest/types/stac_search.py +++ b/eodag/rest/types/stac_search.py @@ -19,17 +19,7 @@ from __future__ import annotations -from typing import ( - TYPE_CHECKING, - Annotated, - Any, - Dict, - List, - Literal, - Optional, - Tuple, - Union, -) +from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, Union import geojson from pydantic import ( @@ -63,8 +53,8 @@ NumType = Union[float, int] BBox = Union[ - Tuple[NumType, NumType, NumType, NumType], - Tuple[NumType, NumType, NumType, NumType, NumType, NumType], + tuple[NumType, NumType, NumType, NumType], + tuple[NumType, NumType, NumType, NumType, NumType, NumType], ] Geometry = Union[ @@ -106,8 +96,8 @@ class which describes the body of a search request model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True) provider: Optional[str] = None - collections: Optional[List[str]] = None - ids: Optional[List[str]] = None + collections: Optional[list[str]] = None + ids: Optional[list[str]] = None bbox: Optional[BBox] = None intersects: Optional[Geometry] = None datetime: Optional[str] = None @@ -117,21 +107,21 @@ class which describes the body of a search request page: Optional[PositiveInt] = Field( # type: ignore default=None, description="Page number, must be a positive integer." 
) - query: Optional[Dict[str, Any]] = None - filter: Optional[Dict[str, Any]] = None + query: Optional[dict[str, Any]] = None + filter: Optional[dict[str, Any]] = None filter_lang: Optional[str] = Field( default=None, alias="filter-lang", description="The language used for filtering.", validate_default=True, ) - sortby: Optional[List[SortBy]] = None + sortby: Optional[list[SortBy]] = None crunch: Optional[str] = None @field_serializer("intersects") def serialize_intersects( self, intersects: Optional[Geometry] - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: """Serialize intersects from shapely to a proper dict""" if intersects: return geojson.loads(geojson.dumps(intersects)) # type: ignore @@ -150,7 +140,7 @@ def check_filter_lang(self) -> Self: @model_validator(mode="before") @classmethod - def only_one_spatial(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def only_one_spatial(cls, values: dict[str, Any]) -> dict[str, Any]: """Check bbox and intersects are not both supplied.""" if "intersects" in values and "bbox" in values: raise ValueError("intersects and bbox parameters are mutually exclusive") @@ -170,7 +160,7 @@ def end_date(self) -> Optional[str]: @field_validator("ids", "collections", mode="before") @classmethod - def str_to_str_list(cls, v: Union[str, List[str]]) -> List[str]: + def str_to_str_list(cls, v: Union[str, list[str]]) -> list[str]: """Convert ids and collections strings to list of strings""" if isinstance(v, str): return [i.strip() for i in v.split(",")] @@ -178,7 +168,7 @@ def str_to_str_list(cls, v: Union[str, List[str]]) -> List[str]: @field_validator("intersects", mode="before") @classmethod - def validate_intersects(cls, v: Union[Dict[str, Any], Geometry]) -> Geometry: + def validate_intersects(cls, v: Union[dict[str, Any], Geometry]) -> Geometry: """Verify format of intersects""" if isinstance(v, BaseGeometry): return v @@ -224,7 +214,7 @@ def validate_datetime(cls, v: str) -> str: # Single date is interpreted as end date values = ["..", v] - dates: List[str] = [] + dates: list[str] = [] for value in values: if value == ".." 
or value == "": dates.append("..") @@ -267,13 +257,13 @@ def spatial_filter(self) -> Optional[Geometry]: def sortby2list( v: Optional[str], -) -> Optional[List[SortBy]]: +) -> Optional[list[SortBy]]: """ Convert sortby filter parameter GET syntax to POST syntax """ if not v: return None - sortby: List[SortBy] = [] + sortby: list[SortBy] = [] for sortby_param in v.split(","): sortby_param = sortby_param.strip() direction: Direction = "desc" if sortby_param.startswith("-") else "asc" diff --git a/eodag/rest/utils/__init__.py b/eodag/rest/utils/__init__.py index d38f623da..864a0e410 100644 --- a/eodag/rest/utils/__init__.py +++ b/eodag/rest/utils/__init__.py @@ -23,17 +23,7 @@ import os from io import BufferedReader from shutil import make_archive, rmtree -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterator, - List, - NamedTuple, - Optional, - Union, -) +from typing import TYPE_CHECKING, Any, Callable, Iterator, NamedTuple, Optional, Union from urllib.parse import unquote_plus, urlencode import orjson @@ -63,7 +53,7 @@ class Cruncher(NamedTuple): """Type hinted Cruncher namedTuple""" clazz: Callable[..., Any] - config_params: List[str] + config_params: list[str] crunchers = { @@ -90,19 +80,19 @@ def format_pydantic_error(e: pydanticValidationError) -> str: def is_dict_str_any(var: Any) -> bool: """Verify whether the variable is of type dict[str, Any]""" - if isinstance(var, Dict): + if isinstance(var, dict): return all(isinstance(k, str) for k in var.keys()) # type: ignore return False -def str2list(v: Optional[str]) -> Optional[List[str]]: +def str2list(v: Optional[str]) -> Optional[list[str]]: """Convert string to list base on , delimiter.""" if v: return v.split(",") return None -def str2json(k: str, v: Optional[str] = None) -> Optional[Dict[str, Any]]: +def str2json(k: str, v: Optional[str] = None) -> Optional[dict[str, Any]]: """decoding a URL parameter and then parsing it as JSON.""" if not v: return None @@ -112,25 +102,25 @@ def str2json(k: str, v: Optional[str] = None) -> Optional[Dict[str, Any]]: raise ValidationError(f"{k}: Incorrect JSON object") from e -def flatten_list(nested_list: Union[Any, List[Any]]) -> List[Any]: +def flatten_list(nested_list: Union[Any, list[Any]]) -> list[Any]: """Flatten a nested list structure into a single list.""" if not isinstance(nested_list, list): return [nested_list] else: - flattened: List[Any] = [] + flattened: list[Any] = [] for element in nested_list: flattened.extend(flatten_list(element)) return flattened -def list_to_str_list(input_list: List[Any]) -> List[str]: +def list_to_str_list(input_list: list[Any]) -> list[str]: """Attempt to convert a list of any type to a list of strings.""" try: # Try to convert each element to a string return [str(element) for element in input_list] except Exception as e: # Raise an exception if any element cannot be converted - raise TypeError(f"Failed to convert to List[str]: {e}") from e + raise TypeError(f"Failed to convert to list[str]: {e}") from e def get_next_link( @@ -138,7 +128,7 @@ def get_next_link( search_request: SearchPostRequest, total_results: Optional[int], items_per_page: int, -) -> Optional[Dict[str, Any]]: +) -> Optional[dict[str, Any]]: """Generate next link URL and body""" body = search_request.model_dump(exclude_none=True) if "bbox" in body: @@ -159,7 +149,7 @@ def get_next_link( params["page"] = str(page + 1) url += f"?{urlencode(params)}" - next: Dict[str, Any] = { + next: dict[str, Any] = { "rel": "next", "href": url, "title": "Next page", diff --git 
a/eodag/rest/utils/cql_evaluate.py b/eodag/rest/utils/cql_evaluate.py index b52de2fd4..7118f596d 100644 --- a/eodag/rest/utils/cql_evaluate.py +++ b/eodag/rest/utils/cql_evaluate.py @@ -16,13 +16,13 @@ # See the License for the specific language governing permissions and # limitations under the License. from datetime import datetime as dt -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union from pygeofilter import ast from pygeofilter.backends.evaluator import Evaluator, handle from pygeofilter.values import Geometry, Interval -simpleNode = Union[ast.Attribute, str, int, complex, float, List[Any], Tuple[Any, ...]] +simpleNode = Union[ast.Attribute, str, int, complex, float, list[Any], tuple[Any, ...]] class EodagEvaluator(Evaluator): @@ -36,7 +36,7 @@ def attribute(self, node: simpleNode, *_) -> simpleNode: return node @handle(Geometry) - def spatial(self, node: Geometry) -> Dict[str, Any]: + def spatial(self, node: Geometry) -> dict[str, Any]: """handle geometry""" return node.geometry @@ -46,7 +46,7 @@ def temporal(self, node: dt) -> str: return node.strftime("%Y-%m-%dT%H:%M:%SZ") @handle(Interval) - def interval(self, _, *interval: Any) -> List[Any]: + def interval(self, _, *interval: Any) -> list[Any]: """handle datetime interval""" return list(interval) @@ -60,7 +60,7 @@ def interval(self, _, *interval: Any) -> List[Any]: ) def predicate( self, node: ast.Predicate, lhs: Any, rhs: Any - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: """ Handle predicates Verify the property is first attribute in each predicate @@ -114,6 +114,6 @@ def contains(self, node: ast.In, lhs: Any, *rhs: Any): return {lhs.name: list(rhs)} @handle(ast.And) - def combination(self, _, lhs: Dict[str, str], rhs: Dict[str, str]): + def combination(self, _, lhs: dict[str, str], rhs: dict[str, str]): """handle combinations""" return {**lhs, **rhs} diff --git a/eodag/rest/utils/rfc3339.py b/eodag/rest/utils/rfc3339.py index c1b45b098..4cead7d33 100644 --- a/eodag/rest/utils/rfc3339.py +++ b/eodag/rest/utils/rfc3339.py @@ -16,14 +16,14 @@ # See the License for the specific language governing permissions and # limitations under the License. import datetime -from typing import Optional, Tuple +from typing import Optional from eodag.utils.rest import rfc3339_str_to_datetime def str_to_interval( interval: Optional[str], -) -> Tuple[Optional[datetime.datetime], Optional[datetime.datetime]]: +) -> tuple[Optional[datetime.datetime], Optional[datetime.datetime]]: """Extract a tuple of datetimes from an interval string. 
Interval strings are defined by diff --git a/eodag/types/__init__.py b/eodag/types/__init__.py index 10e7f9d7b..9f3e01ae0 100644 --- a/eodag/types/__init__.py +++ b/eodag/types/__init__.py @@ -22,11 +22,8 @@ from typing import ( Annotated, Any, - Dict, - List, Literal, Optional, - Tuple, TypedDict, Union, get_args, @@ -42,7 +39,7 @@ # Types mapping from JSON Schema and OpenAPI 3.1.0 specifications to Python # See https://spec.openapis.org/oas/v3.1.0#data-types -JSON_TYPES_MAPPING: Dict[str, type] = { +JSON_TYPES_MAPPING: dict[str, type] = { "boolean": bool, "integer": int, "number": float, @@ -53,7 +50,7 @@ } -def json_type_to_python(json_type: Union[str, List[str]]) -> type: +def json_type_to_python(json_type: Union[str, list[str]]) -> type: """Get python type from json type https://spec.openapis.org/oas/v3.1.0#data-types >>> json_type_to_python("number") @@ -70,7 +67,7 @@ def json_type_to_python(json_type: Union[str, List[str]]) -> type: return type(None) -def _get_min_or_max(type_info: Union[Lt, Gt, Any]) -> Tuple[str, Any]: +def _get_min_or_max(type_info: Union[Lt, Gt, Any]) -> tuple[str, Any]: """Checks if the value from an Annotated object is a minimum or maximum :param type_info: info from Annotated @@ -85,7 +82,7 @@ def _get_min_or_max(type_info: Union[Lt, Gt, Any]) -> Tuple[str, Any]: def _get_type_info_from_annotated( annotated_type: Annotated[type, Any], -) -> Dict[str, Any]: +) -> dict[str, Any]: """Retrieves type information from an annotated object :param annotated_type: annotated object @@ -108,7 +105,7 @@ def _get_type_info_from_annotated( def python_type_to_json( python_type: type, -) -> Optional[Union[str, List[Dict[str, Any]]]]: +) -> Optional[Union[str, list[dict[str, Any]]]]: """Get json type from python https://spec.openapis.org/oas/v3.1.0#data-types >>> python_type_to_json(int) @@ -149,7 +146,7 @@ def python_type_to_json( def json_field_definition_to_python( - json_field_definition: Dict[str, Any], + json_field_definition: dict[str, Any], default_value: Optional[Any] = None, required: Optional[bool] = False, ) -> Annotated[Any, FieldInfo]: @@ -186,7 +183,7 @@ def json_field_definition_to_python( if python_type in (list, set): items = json_field_definition.get("items", None) if isinstance(items, list): - python_type = Tuple[ # type: ignore + python_type = tuple[ # type: ignore tuple( json_field_definition_to_python(item, required=required) for item in items @@ -197,7 +194,7 @@ def json_field_definition_to_python( if enum: literal = Literal[tuple(sorted(enum))] # type: ignore - python_type = List[literal] if python_type in (list, set) else literal # type: ignore + python_type = list[literal] if python_type in (list, set) else literal # type: ignore if "$ref" in json_field_definition: field_type_kwargs["json_schema_extra"] = {"$ref": json_field_definition["$ref"]} @@ -210,7 +207,7 @@ def json_field_definition_to_python( def python_field_definition_to_json( python_field_definition: Annotated[Any, FieldInfo], -) -> Dict[str, Any]: +) -> dict[str, Any]: """Get json field definition from python `typing.Annotated` >>> from pydantic import Field @@ -231,7 +228,7 @@ def python_field_definition_to_json( "%s must be an instance of Annotated" % python_field_definition ) - json_field_definition: Dict[str, Any] = dict() + json_field_definition: dict[str, Any] = dict() python_field_args = get_args(python_field_definition) @@ -311,8 +308,8 @@ def python_field_definition_to_json( def model_fields_to_annotated( - model_fields: Dict[str, FieldInfo], -) -> Dict[str, Annotated[Any, 
FieldInfo]]: + model_fields: dict[str, FieldInfo], +) -> dict[str, Annotated[Any, FieldInfo]]: """Convert BaseModel.model_fields from FieldInfo to Annotated >>> from pydantic import create_model @@ -326,7 +323,7 @@ def model_fields_to_annotated( :param model_fields: BaseModel.model_fields to convert :returns: Annotated tuple usable as create_model argument """ - annotated_model_fields: Dict[str, Annotated[Any, FieldInfo]] = dict() + annotated_model_fields: dict[str, Annotated[Any, FieldInfo]] = dict() for param, field_info in model_fields.items(): field_type = field_info.annotation or type(None) new_field_info = copy_deepcopy(field_info) @@ -336,7 +333,7 @@ def model_fields_to_annotated( def annotated_dict_to_model( - model_name: str, annotated_fields: Dict[str, Annotated[Any, FieldInfo]] + model_name: str, annotated_fields: dict[str, Annotated[Any, FieldInfo]] ) -> BaseModel: """Convert a dictionary of Annotated values to a Pydantic BaseModel. @@ -364,5 +361,14 @@ class ProviderSortables(TypedDict): :param max_sort_params: (optional) The allowed maximum number of sortable(s) in a search request with the provider """ - sortables: List[str] + sortables: list[str] max_sort_params: Annotated[Optional[int], Gt(0)] + + +class S3SessionKwargs(TypedDict, total=False): + """A class representing available keyword arguments to pass to :class:`boto3.session.Session` for authentication""" + + aws_access_key_id: Optional[str] + aws_secret_access_key: Optional[str] + aws_session_token: Optional[str] + profile_name: Optional[str] diff --git a/eodag/types/bbox.py b/eodag/types/bbox.py index 5d79ec2c2..daea6b486 100644 --- a/eodag/types/bbox.py +++ b/eodag/types/bbox.py @@ -15,14 +15,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List, Tuple, Union +from typing import Union from pydantic import BaseModel, ValidationInfo, field_validator from shapely.geometry.polygon import Polygon NumType = Union[float, int] BBoxArgs = Union[ - List[NumType], Tuple[NumType, NumType, NumType, NumType], Dict[str, NumType] + list[NumType], tuple[NumType, NumType, NumType, NumType], dict[str, NumType] ] diff --git a/eodag/types/download_args.py b/eodag/types/download_args.py index a4d1e4994..1b09a8609 100644 --- a/eodag/types/download_args.py +++ b/eodag/types/download_args.py @@ -17,7 +17,7 @@ # limitations under the License. from __future__ import annotations -from typing import Dict, Optional, TypedDict, Union +from typing import Optional, TypedDict, Union class DownloadConf(TypedDict, total=False): @@ -35,6 +35,6 @@ class DownloadConf(TypedDict, total=False): output_dir: str output_extension: Union[str, None] extract: bool - dl_url_params: Dict[str, str] + dl_url_params: dict[str, str] delete_archive: bool asset: Optional[str] diff --git a/eodag/types/search_args.py b/eodag/types/search_args.py index 96e998500..ae915ad57 100644 --- a/eodag/types/search_args.py +++ b/eodag/types/search_args.py @@ -17,7 +17,7 @@ # limitations under the License. 
import re from datetime import datetime -from typing import Annotated, Any, Dict, List, Optional, Tuple, Union, cast +from typing import Annotated, Any, Optional, Union, cast from annotated_types import MinLen from pydantic import BaseModel, ConfigDict, Field, conint, field_validator @@ -31,10 +31,10 @@ from eodag.utils.exceptions import ValidationError NumType = Union[float, int] -GeomArgs = Union[List[NumType], Tuple[NumType], Dict[str, NumType], str, BaseGeometry] +GeomArgs = Union[list[NumType], tuple[NumType], dict[str, NumType], str, BaseGeometry] PositiveInt = conint(gt=0) -SortByList = Annotated[List[Tuple[str, str]], MinLen(1)] +SortByList = Annotated[list[tuple[str, str]], MinLen(1)] class SearchArgs(BaseModel): @@ -48,7 +48,7 @@ class SearchArgs(BaseModel): start: Optional[str] = Field(None) end: Optional[str] = Field(None) geom: Optional[BaseGeometry] = Field(None) - locations: Optional[Dict[str, str]] = Field(None) + locations: Optional[dict[str, str]] = Field(None) page: Optional[int] = Field(DEFAULT_PAGE, gt=0) # type: ignore items_per_page: Optional[PositiveInt] = Field(DEFAULT_ITEMS_PER_PAGE) # type: ignore sort_by: Optional[SortByList] = Field(None) # type: ignore diff --git a/eodag/types/whoosh.py b/eodag/types/whoosh.py index 2276695d3..31e4bb885 100644 --- a/eodag/types/whoosh.py +++ b/eodag/types/whoosh.py @@ -15,8 +15,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import List - from whoosh.fields import Schema from whoosh.index import _DEF_INDEX_NAME, FileIndex from whoosh.matching import NullMatcher @@ -52,7 +50,7 @@ class EODAGQueryParser(QueryParser): def __init__( self, - filters: List[str], + filters: list[str], schema: Schema, ): """ diff --git a/eodag/utils/__init__.py b/eodag/utils/__init__.py index 8a9078b8c..afc55bb18 100644 --- a/eodag/utils/__init__.py +++ b/eodag/utils/__init__.py @@ -54,14 +54,10 @@ TYPE_CHECKING, Any, Callable, - Dict, Iterable, Iterator, - List, Mapping, Optional, - Tuple, - Type, Union, cast, ) @@ -322,7 +318,7 @@ def path_to_uri(path: str) -> str: return Path(path).as_uri() -def mutate_dict_in_place(func: Callable[[Any], Any], mapping: Dict[Any, Any]) -> None: +def mutate_dict_in_place(func: Callable[[Any], Any], mapping: dict[Any, Any]) -> None: """Apply func to values of mapping. The mapping object's values are modified in-place. The function is recursive, @@ -340,7 +336,7 @@ def mutate_dict_in_place(func: Callable[[Any], Any], mapping: Dict[Any, Any]) -> mapping[key] = func(value) -def merge_mappings(mapping1: Dict[Any, Any], mapping2: Dict[Any, Any]) -> None: +def merge_mappings(mapping1: dict[Any, Any], mapping2: dict[Any, Any]) -> None: """Merge two mappings with string keys, values from ``mapping2`` overriding values from ``mapping1``. 
@@ -646,8 +642,8 @@ def rename_with_version(file_path: str, suffix: str = "old") -> str: def format_dict_items( - config_dict: Dict[str, Any], **format_variables: Any -) -> Dict[Any, Any]: + config_dict: dict[str, Any], **format_variables: Any +) -> dict[Any, Any]: r"""Recursively apply :meth:`str.format` to ``**format_variables`` on ``config_dict`` values >>> format_dict_items( @@ -664,8 +660,8 @@ def format_dict_items( def jsonpath_parse_dict_items( - jsonpath_dict: Dict[str, Any], values_dict: Dict[str, Any] -) -> Dict[Any, Any]: + jsonpath_dict: dict[str, Any], values_dict: dict[str, Any] +) -> dict[Any, Any]: """Recursively parse :class:`jsonpath_ng.JSONPath` elements in dict >>> import jsonpath_ng.ext as jsonpath @@ -683,12 +679,12 @@ def jsonpath_parse_dict_items( def update_nested_dict( - old_dict: Dict[Any, Any], - new_dict: Dict[Any, Any], + old_dict: dict[Any, Any], + new_dict: dict[Any, Any], extend_list_values: bool = False, allow_empty_values: bool = False, allow_extend_duplicates: bool = True, -) -> Dict[Any, Any]: +) -> dict[Any, Any]: """Update recursively ``old_dict`` items with ``new_dict`` ones >>> update_nested_dict( @@ -768,10 +764,10 @@ def update_nested_dict( def items_recursive_apply( - input_obj: Union[Dict[Any, Any], List[Any]], + input_obj: Union[dict[Any, Any], list[Any]], apply_method: Callable[..., Any], **apply_method_parameters: Any, -) -> Union[Dict[Any, Any], List[Any]]: +) -> Union[dict[Any, Any], list[Any]]: """Recursive apply method to items contained in input object (dict or list) >>> items_recursive_apply( @@ -809,10 +805,10 @@ def items_recursive_apply( def dict_items_recursive_apply( - config_dict: Dict[Any, Any], + config_dict: dict[Any, Any], apply_method: Callable[..., Any], **apply_method_parameters: Any, -) -> Dict[Any, Any]: +) -> dict[Any, Any]: """Recursive apply method to dict elements >>> dict_items_recursive_apply( @@ -826,7 +822,7 @@ def dict_items_recursive_apply( :param apply_method_parameters: Optional parameters passed to the method :returns: Updated dict """ - result_dict: Dict[Any, Any] = deepcopy(config_dict) + result_dict: dict[Any, Any] = deepcopy(config_dict) for dict_k, dict_v in result_dict.items(): if isinstance(dict_v, dict): result_dict[dict_k] = dict_items_recursive_apply( @@ -845,10 +841,10 @@ def dict_items_recursive_apply( def list_items_recursive_apply( - config_list: List[Any], + config_list: list[Any], apply_method: Callable[..., Any], **apply_method_parameters: Any, -) -> List[Any]: +) -> list[Any]: """Recursive apply method to list elements >>> list_items_recursive_apply( @@ -881,8 +877,8 @@ def list_items_recursive_apply( def items_recursive_sort( - input_obj: Union[List[Any], Dict[Any, Any]], -) -> Union[List[Any], Dict[Any, Any]]: + input_obj: Union[list[Any], dict[Any, Any]], +) -> Union[list[Any], dict[Any, Any]]: """Recursive sort dict items contained in input object (dict or list) >>> items_recursive_sort( @@ -906,7 +902,7 @@ def items_recursive_sort( return input_obj -def dict_items_recursive_sort(config_dict: Dict[Any, Any]) -> Dict[Any, Any]: +def dict_items_recursive_sort(config_dict: dict[Any, Any]) -> dict[Any, Any]: """Recursive sort dict elements >>> dict_items_recursive_sort( @@ -917,7 +913,7 @@ def dict_items_recursive_sort(config_dict: Dict[Any, Any]) -> Dict[Any, Any]: :param config_dict: Input nested dictionary :returns: Updated dict """ - result_dict: Dict[Any, Any] = deepcopy(config_dict) + result_dict: dict[Any, Any] = deepcopy(config_dict) for dict_k, dict_v in result_dict.items(): if 
isinstance(dict_v, dict): result_dict[dict_k] = dict_items_recursive_sort(dict_v) @@ -929,7 +925,7 @@ def dict_items_recursive_sort(config_dict: Dict[Any, Any]) -> Dict[Any, Any]: return dict(sorted(result_dict.items())) -def list_items_recursive_sort(config_list: List[Any]) -> List[Any]: +def list_items_recursive_sort(config_list: list[Any]) -> list[Any]: """Recursive sort dicts in list elements >>> list_items_recursive_sort(["b", {2: 0, 0: 1, 1: 2}]) @@ -938,7 +934,7 @@ def list_items_recursive_sort(config_list: List[Any]) -> List[Any]: :param config_list: Input list containing nested lists/dicts :returns: Updated list """ - result_list: List[Any] = deepcopy(config_list) + result_list: list[Any] = deepcopy(config_list) for list_idx, list_v in enumerate(result_list): if isinstance(list_v, dict): result_list[list_idx] = dict_items_recursive_sort(list_v) @@ -1064,7 +1060,7 @@ def format_string(key: str, str_to_format: Any, **format_variables: Any) -> Any: def parse_jsonpath( - key: str, jsonpath_obj: Union[str, jsonpath.Child], **values_dict: Dict[str, Any] + key: str, jsonpath_obj: Union[str, jsonpath.Child], **values_dict: dict[str, Any] ) -> Optional[str]: """Parse jsonpah in ``jsonpath_obj`` using ``values_dict`` @@ -1084,7 +1080,7 @@ def parse_jsonpath( return jsonpath_obj -def nested_pairs2dict(pairs: Union[List[Any], Any]) -> Union[Any, Dict[Any, Any]]: +def nested_pairs2dict(pairs: Union[list[Any], Any]) -> Union[Any, dict[Any, Any]]: """Create a dict using nested pairs >>> nested_pairs2dict([["foo", [["bar", "baz"]]]]) @@ -1106,7 +1102,7 @@ def nested_pairs2dict(pairs: Union[List[Any], Any]) -> Union[Any, Dict[Any, Any] def get_geometry_from_various( - locations_config: List[Dict[str, Any]] = [], **query_args: Any + locations_config: list[dict[str, Any]] = [], **query_args: Any ) -> BaseGeometry: """Creates a ``shapely.geometry`` using given query kwargs arguments @@ -1282,7 +1278,7 @@ def _mutable_cached_yaml_load(config_path: str) -> Any: return yaml.load(fh, Loader=yaml.SafeLoader) -def cached_yaml_load(config_path: str) -> Dict[str, Any]: +def cached_yaml_load(config_path: str) -> dict[str, Any]: """Cached :func:`yaml.load` :param config_path: path to the yaml configuration file @@ -1292,12 +1288,12 @@ def cached_yaml_load(config_path: str) -> Dict[str, Any]: @functools.lru_cache() -def _mutable_cached_yaml_load_all(config_path: str) -> List[Any]: +def _mutable_cached_yaml_load_all(config_path: str) -> list[Any]: with open(config_path, "r") as fh: return list(yaml.load_all(fh, Loader=yaml.Loader)) -def cached_yaml_load_all(config_path: str) -> List[Any]: +def cached_yaml_load_all(config_path: str) -> list[Any]: """Cached :func:`yaml.load_all` Load all configurations stored in the configuration file as separated yaml documents @@ -1310,7 +1306,7 @@ def cached_yaml_load_all(config_path: str) -> List[Any]: def get_bucket_name_and_prefix( url: str, bucket_path_level: Optional[int] = None -) -> Tuple[Optional[str], Optional[str]]: +) -> tuple[Optional[str], Optional[str]]: """Extract bucket name and prefix from URL :param url: (optional) URL to use as product.location @@ -1369,10 +1365,10 @@ def deepcopy(sth: Any) -> Any: :param sth: Object to copy :returns: Copied object """ - _dispatcher: Dict[Type[Any], Callable[..., Any]] = {} + _dispatcher: dict[type[Any], Callable[..., Any]] = {} def _copy_list( - input_list: List[Any], dispatch: Dict[Type[Any], Callable[..., Any]] + input_list: list[Any], dispatch: dict[type[Any], Callable[..., Any]] ): ret = input_list.copy() for idx, item 
in enumerate(ret): @@ -1382,7 +1378,7 @@ def _copy_list( return ret def _copy_dict( - input_dict: Dict[Any, Any], dispatch: Dict[Type[Any], Callable[..., Any]] + input_dict: dict[Any, Any], dispatch: dict[type[Any], Callable[..., Any]] ): ret = input_dict.copy() for key, value in ret.items(): @@ -1505,7 +1501,7 @@ def get_ssl_context(ssl_verify: bool) -> ssl.SSLContext: return ctx -def sort_dict(input_dict: Dict[str, Any]) -> Dict[str, Any]: +def sort_dict(input_dict: dict[str, Any]) -> dict[str, Any]: """ Recursively sorts a dict by keys. @@ -1521,7 +1517,7 @@ def sort_dict(input_dict: Dict[str, Any]) -> Dict[str, Any]: } -def dict_md5sum(input_dict: Dict[str, Any]) -> str: +def dict_md5sum(input_dict: dict[str, Any]) -> str: """ Hash nested dictionary diff --git a/eodag/utils/exceptions.py b/eodag/utils/exceptions.py index 4c8f86708..883ca5216 100644 --- a/eodag/utils/exceptions.py +++ b/eodag/utils/exceptions.py @@ -20,7 +20,7 @@ from typing import TYPE_CHECKING, Annotated if TYPE_CHECKING: - from typing import Optional, Set + from typing import Optional from typing_extensions import Doc, Self @@ -108,7 +108,7 @@ def from_error(cls, error: Exception, msg: Optional[str] = None) -> Self: class ValidationError(RequestError): """Error validating data""" - def __init__(self, message: str, parameters: Set[str] = set()) -> None: + def __init__(self, message: str, parameters: set[str] = set()) -> None: self.message = message self.parameters = parameters diff --git a/eodag/utils/import_system.py b/eodag/utils/import_system.py index 4afcf70ee..1481b4e73 100644 --- a/eodag/utils/import_system.py +++ b/eodag/utils/import_system.py @@ -21,14 +21,14 @@ import pkgutil from contextlib import contextmanager from functools import partial -from typing import TYPE_CHECKING, Any, Generator, Tuple +from typing import TYPE_CHECKING, Any, Generator if TYPE_CHECKING: from types import ModuleType def import_all_modules( - base_package: ModuleType, depth: int = 1, exclude: Tuple[str, ...] = () + base_package: ModuleType, depth: int = 1, exclude: tuple[str, ...] = () ) -> None: """Import all modules in base_package, including modules in the sub-packages up to `depth` and excluding modules in `exclude`. diff --git a/eodag/utils/requests.py b/eodag/utils/requests.py index dd939b5b2..b07358097 100644 --- a/eodag/utils/requests.py +++ b/eodag/utils/requests.py @@ -19,7 +19,7 @@ import logging import os -from typing import Any, Optional, Tuple +from typing import Any, Optional import requests @@ -75,7 +75,7 @@ class LocalFileAdapter(requests.adapters.BaseAdapter): """ @staticmethod - def _chkpath(method: str, path: str) -> Tuple[int, str]: + def _chkpath(method: str, path: str) -> tuple[int, str]: """Return an HTTP status for the given filesystem path. 
         :param method: method of the request
diff --git a/eodag/utils/rest.py b/eodag/utils/rest.py
index eb82d0a49..d688037a5 100644
--- a/eodag/utils/rest.py
+++ b/eodag/utils/rest.py
@@ -21,7 +21,7 @@
 import datetime
 import re
 
-from typing import Any, Dict, Optional, Tuple
+from typing import Any, Optional
 
 import dateutil.parser
 from dateutil import tz
@@ -35,7 +35,7 @@
 )
 
 
-def get_datetime(arguments: Dict[str, Any]) -> Tuple[Optional[str], Optional[str]]:
+def get_datetime(arguments: dict[str, Any]) -> tuple[Optional[str], Optional[str]]:
     """Get start and end dates from a dict containing `/` separated dates in `datetime` item
 
     :param arguments: dict containing a single date or `/` separated dates in `datetime` item
diff --git a/eodag/utils/stac_reader.py b/eodag/utils/stac_reader.py
index 6599fe4c0..43d41b959 100644
--- a/eodag/utils/stac_reader.py
+++ b/eodag/utils/stac_reader.py
@@ -20,7 +20,7 @@
 import logging
 import re
 import socket
-from typing import Any, Callable, Dict, List, Optional, Union
+from typing import Any, Callable, Optional, Union
 from urllib.error import URLError
 from urllib.request import urlopen
 
@@ -108,7 +108,7 @@ def fetch_stac_items(
     max_connections: int = 100,
     timeout: int = HTTP_REQ_TIMEOUT,
     ssl_verify: bool = True,
-) -> List[Dict[str, Any]]:
+) -> list[dict[str, Any]]:
     """Fetch STAC item from a single item file or items from a catalog.
 
     :param stac_path: A STAC object filepath
@@ -142,13 +142,13 @@ def _fetch_stac_items_from_catalog(
     recursive: bool,
     max_connections: int,
     _text_opener: Callable[[str, bool], Any],
-) -> List[Any]:
+) -> list[Any]:
     """Fetch items from a STAC catalog"""
-    items: List[Dict[Any, Any]] = []
+    items: list[dict[Any, Any]] = []
 
     # pystac cannot yet return links from a single file catalog, see:
     # https://github.com/stac-utils/pystac/issues/256
-    extensions: Optional[Union[List[str], str]] = getattr(cat, "stac_extensions", None)
+    extensions: Optional[Union[list[str], str]] = getattr(cat, "stac_extensions", None)
     if extensions:
         extensions = extensions if isinstance(extensions, list) else [extensions]
         if "single-file-stac" in extensions:
@@ -157,7 +157,7 @@ def _fetch_stac_items_from_catalog(
 
     # Making the links absolute allows for both relative and absolute links to be handled.
     if not recursive:
-        hrefs: List[Optional[str]] = [
+        hrefs: list[Optional[str]] = [
             link.get_absolute_href() for link in cat.get_item_links()
         ]
     else:
@@ -188,7 +188,7 @@ def fetch_stac_collections(
     max_connections: int = 100,
     timeout: int = HTTP_REQ_TIMEOUT,
     ssl_verify: bool = True,
-) -> List[Dict[str, Any]]:
+) -> list[dict[str, Any]]:
     """Fetch STAC collection(s) from a catalog.
 
     :param stac_path: A STAC object filepath
@@ -217,12 +217,12 @@ def _fetch_stac_collections_from_catalog(
     collection: Optional[str],
     max_connections: int,
     _text_opener: Callable[[str, bool], Any],
-) -> List[Any]:
+) -> list[Any]:
     """Fetch collections from a STAC catalog"""
-    collections: List[Dict[Any, Any]] = []
+    collections: list[dict[Any, Any]] = []
 
     # Making the links absolute allows for both relative and absolute links to be handled.
- hrefs: List[Optional[str]] = [ + hrefs: list[Optional[str]] = [ link.get_absolute_href() for link in cat.get_child_links() if collection is not None and link.title == collection diff --git a/tests/integration/test_core_search_results.py b/tests/integration/test_core_search_results.py index 45aa7d39d..0b4589cf3 100644 --- a/tests/integration/test_core_search_results.py +++ b/tests/integration/test_core_search_results.py @@ -204,7 +204,7 @@ def test_group_by_extent(self): [first_search, second_search, third_search] ) - # The returned value is a List[SearchResult] + # The returned value is a list[SearchResult] self.assertIsInstance(grouped_searches, list) self.assertTrue(all(isinstance(sr, SearchResult) for sr in grouped_searches)) # We expect three groups because we have given products that have diff --git a/tests/test_requirements.py b/tests/test_requirements.py index 99dbbcf23..c89ba72a6 100644 --- a/tests/test_requirements.py +++ b/tests/test_requirements.py @@ -22,7 +22,7 @@ import re import sys import unittest -from typing import Any, Dict, Iterator, Set +from typing import Any, Iterator import importlib_metadata from packaging.requirements import Requirement @@ -59,9 +59,9 @@ def get_imports(filepath: str) -> Iterator[Any]: yield node.module.split(".")[0] -def get_project_imports(project_path: str) -> Set[str]: +def get_project_imports(project_path: str) -> set[str]: """Get python imports from the project path""" - imports: Set[str] = set() + imports: set[str] = set() for dirpath, dirs, files in os.walk(project_path): for filename in files: if filename.endswith(".py"): @@ -83,7 +83,7 @@ def get_setup_requires(setup_cfg_path: str): ) -def get_optional_dependencies(setup_cfg_path: str, extra: str) -> Set[str]: +def get_optional_dependencies(setup_cfg_path: str, extra: str) -> set[str]: """Get extra requirements from the given setup.cfg file path""" config = configparser.ConfigParser() config.read(setup_cfg_path) @@ -98,7 +98,7 @@ def get_optional_dependencies(setup_cfg_path: str, extra: str) -> Set[str]: return deps -def get_resulting_extras(setup_cfg_path: str, extra: str) -> Set[str]: +def get_resulting_extras(setup_cfg_path: str, extra: str) -> set[str]: """Get resulting extras for a single extra from the given setup.cfg file path""" config = configparser.ConfigParser() config.read(setup_cfg_path) @@ -109,7 +109,7 @@ def get_resulting_extras(setup_cfg_path: str, extra: str) -> Set[str]: return extras -def get_entrypoints_extras(setup_cfg_path: str) -> Dict[str, str]: +def get_entrypoints_extras(setup_cfg_path: str) -> dict[str, str]: """Get entrypoints and associated extra from the given setup.cfg file path""" config = configparser.ConfigParser() config.read(setup_cfg_path) diff --git a/tests/units/test_download_plugins.py b/tests/units/test_download_plugins.py index 9d5f8a47d..274781bb2 100644 --- a/tests/units/test_download_plugins.py +++ b/tests/units/test_download_plugins.py @@ -25,7 +25,7 @@ import zipfile from pathlib import Path from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir -from typing import Any, Callable, Dict +from typing import Any, Callable from unittest import mock import responses @@ -244,7 +244,7 @@ def _set_download_simulation( ) def _dummy_product( - self, provider: str, properties: Dict[str, Any], productType: str + self, provider: str, properties: dict[str, Any], productType: str ): return EOProduct( provider, @@ -257,7 +257,7 @@ def _dummy_downloadable_product( mock_requests_session: Callable[[], None], local_product_as_archive_path: 
str, provider: str, - properties: Dict[str, Any], + properties: dict[str, Any], productType: str, ): self._set_download_simulation( diff --git a/tests/units/test_http_server.py b/tests/units/test_http_server.py index afb1452ff..37d7363f0 100644 --- a/tests/units/test_http_server.py +++ b/tests/units/test_http_server.py @@ -24,7 +24,7 @@ import unittest from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from unittest.mock import Mock, call import geojson @@ -322,7 +322,7 @@ def _request_valid_raw( url: str, mock_search: Mock, expected_search_kwargs: Union[ - List[Dict[str, Any]], Dict[str, Any], None + list[dict[str, Any]], dict[str, Any], None ] = None, method: str = "GET", post_data: Optional[Any] = None, @@ -368,7 +368,7 @@ def _request_valid( self, url: str, expected_search_kwargs: Union[ - List[Dict[str, Any]], Dict[str, Any], None + list[dict[str, Any]], dict[str, Any], None ] = None, method: str = "GET", post_data: Optional[Any] = None, diff --git a/tests/units/test_stac_utils.py b/tests/units/test_stac_utils.py index ada153c67..144993fe0 100644 --- a/tests/units/test_stac_utils.py +++ b/tests/units/test_stac_utils.py @@ -131,7 +131,7 @@ def test_str2list(self): ) def test_is_dict_str_any(self): - """is_dict_str_any verifies whether the input variable is of type Dict[str, Any]""" + """is_dict_str_any verifies whether the input variable is of type dict[str, Any]""" self.assertTrue( self.rest_utils.is_dict_str_any({"key1": "value1", "key2": "value2"}) ) @@ -162,7 +162,7 @@ def test_flatten_list(self): def test_list_to_str_list(self): """ - list_to_str_list convert a List[Any] to a List[str]. + list_to_str_list convert a list[Any] to a list[str]. It raises a TypeError if the convertion cannot be done """ self.assertEqual(self.rest_utils.list_to_str_list([1, 2, 3]), ["1", "2", "3"]) diff --git a/tests/utils.py b/tests/utils.py index 435fc69a1..32fb60744 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -16,7 +16,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import os -from typing import Any, Dict, Literal, Optional +from typing import Any, Literal, Optional # All tests files should import mock from this place from unittest import mock @@ -62,7 +62,7 @@ def write_eodag_conf_with_fake_credentials(config_file): def mock_request( url: str, - body: Optional[Dict[str, Any]] = None, + body: Optional[dict[str, Any]] = None, method: Optional[Literal["GET", "POST"]] = "GET", ) -> mock.Mock: parsed_url = urlparse(url) diff --git a/utils/params_mapping_to_csv.py b/utils/params_mapping_to_csv.py index 975afeec7..531fcd004 100644 --- a/utils/params_mapping_to_csv.py +++ b/utils/params_mapping_to_csv.py @@ -21,7 +21,6 @@ import json import logging import os -from typing import List import requests from lxml import html @@ -76,7 +75,7 @@ def params_mapping_to_csv( ) # list of lists of all parameters per provider - params_list_of_lists: List[List[str]] = [] + params_list_of_lists: list[list[str]] = [] for p in dag.providers_config.keys(): if hasattr(dag.providers_config[p], "search") and hasattr( dag.providers_config[p].search, "metadata_mapping" @@ -86,7 +85,7 @@ def params_mapping_to_csv( ) # union of params_list_of_lists - global_keys: List[str] = sorted(list(set().union(*(params_list_of_lists)))) + global_keys: list[str] = sorted(list(set().union(*(params_list_of_lists)))) # csv fieldnames fieldnames = ["parameter"] + sorted(dag.providers_config.keys()) diff --git a/utils/product_types_information_to_csv.py b/utils/product_types_information_to_csv.py index 6a0e73e4c..df3957cf4 100644 --- a/utils/product_types_information_to_csv.py +++ b/utils/product_types_information_to_csv.py @@ -20,7 +20,7 @@ import csv import os import re -from typing import Any, Dict, List +from typing import Any from eodag.api.core import EODataAccessGateway from eodag.config import load_default_config @@ -61,7 +61,7 @@ def product_types_info_to_csv( os.environ.update(eodag_env_backup) product_types = dag.list_product_types(fetch_providers=False) - product_types_names: List[str] = [ + product_types_names: list[str] = [ product_type["ID"] for product_type in product_types ] metadata_params = list(k for k in product_types[0].keys() if k != "ID") @@ -77,7 +77,7 @@ def product_types_info_to_csv( product_types_writer.writeheader() # create product types table rows - product_types_rows: Dict[str, Any] = {} + product_types_rows: dict[str, Any] = {} for product_type_name in product_types_names: product_types_rows[product_type_name] = {"product type": product_type_name} for metadata_param in metadata_params:
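
A minimal sketch (outside the patch itself) of the typing convention this refactor applies throughout: PEP 585 built-in generics (dict, list, set, tuple) replace their typing.Dict / List / Set / Tuple counterparts, with only Any, Optional, Union and similar still imported from typing. The helper names below are illustrative only and are not part of the eodag API; `from __future__ import annotations` is assumed so the annotations also evaluate cleanly on older interpreters.

from __future__ import annotations  # built-in generics usable in annotations on Python 3.7+

from typing import Any


# Before this refactor: Dict[str, Any], List[...], Tuple[str, ...] imported from typing.
# After: built-in generics, as applied across eodag in this patch.
def sort_records(records: list[dict[str, Any]], keys: tuple[str, ...]) -> list[dict[str, Any]]:
    """Illustrative helper: sort dicts by the given keys (hypothetical, not an eodag function)."""
    return sorted(records, key=lambda rec: tuple(str(rec.get(k)) for k in keys))


def keep_keys(record: dict[str, Any], wanted: set[str]) -> dict[str, Any]:
    """Illustrative helper: keep only the wanted keys of a record."""
    return {k: v for k, v in record.items() if k in wanted}


if __name__ == "__main__":
    rows = [{"id": "S2B", "date": "2025-01-17"}, {"id": "S2A", "date": "2025-01-16"}]
    print(sort_records(rows, ("date",)))   # sorted by date
    print(keep_keys(rows[0], {"id"}))      # {'id': 'S2B'}

Since only annotations change, the runtime behaviour of the refactored modules is unaffected; the future import is merely a safety net for interpreters released before the built-in generic syntax.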