diff --git a/python/idsse_common/idsse/common/aws_utils.py b/python/idsse_common/idsse/common/aws_utils.py index 500eb62..9e25ffc 100644 --- a/python/idsse_common/idsse/common/aws_utils.py +++ b/python/idsse_common/idsse/common/aws_utils.py @@ -12,8 +12,8 @@ import logging import fnmatch import os +from collections.abc import Sequence from datetime import datetime, timedelta, UTC -from typing import Sequence, Set, Tuple, Optional from .path_builder import PathBuilder from .utils import TimeDelta, datetime_gen, exec_cmd @@ -91,7 +91,7 @@ def aws_cp(self, path: str, dest: str) -> bool: finally: pass - def check_for(self, issue: datetime, valid: datetime) -> Optional[Tuple[datetime, str]]: + def check_for(self, issue: datetime, valid: datetime) -> tuple[datetime, str] | None: """Checks if an object passed issue/valid exists Args: @@ -99,7 +99,7 @@ def check_for(self, issue: datetime, valid: datetime) -> Optional[Tuple[datetime valid (datetime): The valid date/time used to format the path to the object's location Returns: - Optional[Tuple[datetime, str]]: A tuple of the valid date/time (indicated by object's + [tuple[datetime, str] | None]: A tuple of the valid date/time (indicated by object's location) and location (path) of a object, or None if object does not exist """ @@ -117,7 +117,7 @@ def check_for(self, issue: datetime, valid: datetime) -> Optional[Tuple[datetime def get_issues(self, num_issues: int = 1, - issue_start: Optional[datetime] = None, + issue_start: datetime | None = None, issue_end: datetime = datetime.now(UTC), time_delta: timedelta = timedelta(hours=1) ) -> Sequence[datetime]: @@ -136,7 +136,7 @@ def get_issues(self, if time_delta == zero_time_delta: raise ValueError('Time delta must be non zero') - issues_set: Set[datetime] = set() + issues_set: set[datetime] = set() if issue_start: datetimes = datetime_gen(issue_end, time_delta, issue_start, num_issues) else: @@ -161,19 +161,19 @@ def get_issues(self, def get_valids(self, issue: datetime, - 
valid_start: Optional[datetime] = None, - valid_end: Optional[datetime] = None) -> Sequence[Tuple[datetime, str]]: + valid_start: datetime | None = None, + valid_end: datetime | None = None) -> Sequence[tuple[datetime, str]]: """Get all objects consistent with the passed issue date/time and filter by valid range Args: issue (datetime): The issue date/time used to format the path to the object's location - valid_start (datetime, optional): All returned objects will be for + valid_start (datetime | None, optional): All returned objects will be for valids >= valid_start. Defaults to None. - valid_end (datetime, optional): All returned objects will be for valids <= valid_end. - Defaults to None. + valid_end (datetime | None, optional): All returned objects will be for + valids <= valid_end. Defaults to None. Returns: - Sequence[Tuple[datetime, str]]: A sequence of tuples with valid date/time (indicated by + Sequence[tuple[datetime, str]]: A sequence of tuples with valid date/time (indicated by object's location) and the object's location (path). Empty Sequence if no valids found for given time range. 
""" diff --git a/python/idsse_common/idsse/common/config.py b/python/idsse_common/idsse/common/config.py index 77ce3e0..584065e 100644 --- a/python/idsse_common/idsse/common/config.py +++ b/python/idsse_common/idsse/common/config.py @@ -12,8 +12,9 @@ import glob import json import logging +from collections.abc import Iterable from inspect import signature -from typing import Self, Union, List, Optional +from typing import Self logger = logging.getLogger(__name__) @@ -21,8 +22,8 @@ class Config: """Configuration data class""" def __init__(self, - config: Union[dict, List[dict], str], - keys: Optional[Union[list, str]] = None, + config: dict | Iterable[dict] | str, + keys: Iterable | str | None = None, recursive: bool = False, ignore_missing: bool = False) -> None: @@ -90,7 +91,7 @@ def _load_from_filepath(self, filepath: str) -> dict: with open(filepath, 'r', encoding='utf8') as file: return json.load(file) - def _from_filepaths(self, filepaths: List[str], keys: str) -> Self: + def _from_filepaths(self, filepaths: Iterable[str], keys: str) -> Self: config_dicts = [self._load_from_filepath(filepath) for filepath in filepaths] self._from_config_dicts(config_dicts, keys) @@ -106,7 +107,7 @@ def _from_config_dict(self, config_dict: dict, keys: str) -> Self: # update the instance dictionary to hold all configuration attributes return self.__dict__.update(config_dict) - def _from_config_dicts(self, config_dicts: List[dict], keys: str) -> Self: + def _from_config_dicts(self, config_dicts: Iterable[dict], keys: str) -> Self: self._from_config_dict(config_dicts[0], keys) for config_dict in config_dicts[1:]: # if inherited class takes only one argument diff --git a/python/idsse_common/idsse/common/json_message.py b/python/idsse_common/idsse/common/json_message.py index b1c4125..04a8af0 100644 --- a/python/idsse_common/idsse/common/json_message.py +++ b/python/idsse_common/idsse/common/json_message.py @@ -10,23 +10,24 @@ # 
------------------------------------------------------------------------------ import json -from typing import Any, Dict, List, Optional, Tuple, Union +from collections.abc import Sequence +from typing import Any from uuid import UUID, uuid4 -Json = Union[Dict[str, Any], List[Any], int, str, float, bool, type[None]] +Json = dict[str, Any] | Sequence[Any] | int | str | float | bool | None def get_corr_id( - message: Union[str, dict] -) -> Optional[Tuple[Optional[str], Optional[Union[UUID, str]], Optional[str]]]: + message: str | dict +) -> tuple[str | None, UUID | str | None, str | None] | None: """Extract the correlation id from a json message. The correlation id is made of three parts: originator, uuid, issue date/time Args: - message (Union[str, json]): The message to be searched as either a string or json obj + message (str | json): The message to be searched as either a string or json obj Returns: - Optional[Tuple[Optional[str], Optional[Union[UUID, str]], Optional[str]]]: + tuple[str | None, UUID | str | None, str | None] | None: A tuple containing originator, uuid, and issue date/time, or None if a given part was not found. Returns simply None if no parts found """ @@ -46,17 +47,17 @@ return None -def add_corr_id(message: Union[dict, str], +def add_corr_id(message: dict | str, originator: str, - uuid_: Optional[Union[UUID, str]] = None, - issue_dt: Optional[str] = None) -> dict: + uuid_: UUID | str | None = None, + issue_dt: str | None = None) -> dict: """Add (or overwrites) the three part correlation id to a json message Args: - message (Union[dict, str]): The message to be updated + message (dict | str): The message to be updated originator (str): String representation of the originating service - uuid_ (Union[UUID, str], optional): A UUID. Defaults to None. - issue_dt (str, optional): The specific issue date/time associated with the message. + uuid_ (UUID | str | None, optional): A UUID. Defaults to None.
+ issue_dt (str | None, optional): The specific issue date/time associated with the message. Defaults to None. Returns: diff --git a/python/idsse_common/idsse/common/log_util.py b/python/idsse_common/idsse/common/log_util.py index 99005c5..10be56c 100644 --- a/python/idsse_common/idsse/common/log_util.py +++ b/python/idsse_common/idsse/common/log_util.py @@ -15,9 +15,9 @@ import logging import time import uuid +from collections.abc import Sequence from contextvars import ContextVar from datetime import datetime -from typing import Union, Optional, List from .utils import to_iso @@ -28,8 +28,8 @@ def set_corr_id_context_var( originator: str, - key: Optional[uuid.UUID] = None, - issue_dt: Optional[Union[str, datetime]] = None + key: uuid.UUID | None = None, + issue_dt: str | datetime | None = None ) -> None: """ Build and set correlation ID ContextVar for logging module, based on originator and @@ -37,12 +37,12 @@ Args: originator (str): Function, class, service name, etc. that is using logging module - key (Optional[uuid.UUID]): a UUID. Default: randomly generated UUID. - issue_dt (Optional[Union[str, datetime]]): Datetime when a relevant forecast was issued + key (uuid.UUID | None, optional): a UUID. Default: randomly generated UUID.
+ issue_dt (str | datetime | None, optional): Datetime when a relevant forecast was issued """ if not key: key = uuid.uuid4() if issue_dt: if not isinstance(issue_dt, str): issue_dt = to_iso(issue_dt) @@ -56,7 +56,7 @@ def get_corr_id_context_var_str() -> str: return corr_id_context_var.get() -def get_corr_id_context_var_parts() -> List[str]: +def get_corr_id_context_var_parts() -> Sequence[str]: """Split correlation ID ContextVar into its parts, such as [originator, key, issue_datetime]""" return corr_id_context_var.get().split(';') @@ -159,7 +159,8 @@ def get_default_log_config(level: str, 'loggers': { '': { 'level': level, - 'handlers': ['default', 'rabbit'] + # 'handlers': ['default', 'rabbit'] + 'handlers': ['default'] }, } } diff --git a/python/idsse_common/idsse/common/path_builder.py b/python/idsse_common/idsse/common/path_builder.py index bbfb5be..92a5286 100644 --- a/python/idsse_common/idsse/common/path_builder.py +++ b/python/idsse_common/idsse/common/path_builder.py @@ -19,7 +19,7 @@ import os import re from datetime import datetime, timedelta, UTC -from typing import Dict, Self, Union +from typing import Self from .utils import TimeDelta @@ -115,17 +115,17 @@ def path_fmt(self): return os.path.join(self.dir_fmt, self.filename_fmt) def build_dir(self, - issue: datetime = None, - valid: datetime = None, - lead: Union[timedelta, TimeDelta] = None) -> str: + issue: datetime | None = None, + valid: datetime | None = None, + lead: timedelta | TimeDelta | None = None) -> str: """Attempts to build the directory with provided arguments Args: - issue (datetime, optional): Issue datetime, should be provided is the + issue (datetime | None, optional): Issue datetime, should be provided is the directory is dependant on it. Defaults to None. - valid (datetime, optional): Valid datetime, should be provided is the + valid (datetime | None, optional): Valid datetime, should be provided is the directory is dependant on it. . Defaults to None.
- lead (Union[timedelta, TimeDelta], optional): Lead can be provided in addition + lead (timedelta | TimeDelta | None, optional): Lead can be provided in addition to issue or valid. Defaults to None. Returns: @@ -137,17 +137,17 @@ def build_dir(self, return self.dir_fmt.format(issue=issue, valid=valid, lead=lead) def build_filename(self, - issue: datetime = None, - valid: datetime = None, - lead: Union[timedelta, TimeDelta] = None) -> str: + issue: datetime | None = None, + valid: datetime | None = None, + lead: timedelta | TimeDelta | None = None) -> str: """Attempts to build the filename with provided arguments Args: - issue (datetime, optional): Issue datetime, should be provided is the + issue (datetime | None, optional): Issue datetime, should be provided is the filename is dependant on it. Defaults to None. - valid (datetime, optional): Valid datetime, should be provided is the + valid (datetime | None, optional): Valid datetime, should be provided is the filename is dependant on it. . Defaults to None. - lead (Union[timedelta, TimeDelta], optional): Lead can be provided in addition + lead (timedelta | TimeDelta | None, optional): Lead can be provided in addition to issue or valid. Defaults to None. Returns: @@ -157,17 +157,17 @@ def build_filename(self, return self.filename_fmt.format(issue=issue, valid=valid, lead=lead) def build_path(self, - issue: datetime = None, - valid: datetime = None, - lead: Union[timedelta, TimeDelta] = None) -> str: + issue: datetime | None = None, + valid: datetime | None = None, + lead: timedelta | TimeDelta | None = None) -> str: """Attempts to build the path with provided arguments Args: - issue (datetime, optional): Issue datetime, should be provided is the + issue (datetime | None, optional): Issue datetime, should be provided is the path is dependant on it. Defaults to None. 
- valid (datetime, optional): Valid datetime, should be provided is the + valid (datetime | None, optional): Valid datetime, should be provided is the path is dependant on it. . Defaults to None. - lead (Union[timedelta, TimeDelta], optional): Lead can be provided in addition + lead (timedelta | TimeDelta | None, optional): Lead can be provided in addition to issue or valid. Defaults to None. Returns: @@ -235,17 +235,17 @@ def get_valid(self, path: str) -> datetime: return self.get_valid_from_time_args(time_args) @staticmethod - def get_issue_from_time_args(parsed_args: Dict, - valid: datetime = None, - lead: timedelta = None) -> datetime: + def get_issue_from_time_args(parsed_args: dict, + valid: datetime | None = None, + lead: timedelta | None = None) -> datetime: """Static method for creating an issue date/time from parsed arguments and optional inputs Args: parsed_args (dict): A dictionary of issue, valid and/or lead info resulting from parsing a path, dir, or filename - valid (datetime, optional): Depending on info found during parsing, valid date/time - can be useful. Defaults to None. - lead (timedelta, optional): Depending on info found during parsing, lead time + valid (datetime | None, optional): Depending on info found during parsing, + valid date/time can be useful. Defaults to None. + lead (timedelta | None, optional): Depending on info found during parsing, lead time can be useful. . Defaults to None. 
Returns: @@ -274,17 +274,17 @@ def get_issue_from_time_args(parsed_args: Dict, @staticmethod def get_valid_from_time_args(parsed_args: dict, - issue: datetime = None, - lead: timedelta = None) -> datetime: + issue: datetime | None = None, + lead: timedelta | None = None) -> datetime: """Static method for creating a valid date/time from parsed arguments and optional inputs Args: parsed_args (dict): A dictionary of issue, valid and/or lead info resulting from parsing a path, dir, or filename - issue (datetime, optional): Depending on info found during parsing, issue date/time + issue (datetime | None, optional): Depending on info found during parsing, + issue date/time can be useful. Defaults to None. + lead (timedelta | None, optional): Depending on info found during parsing, lead time can be useful. Defaults to None. - lead (timedelta, optional): Depending on info found during parsing, lead time - can be useful. . Defaults to None. Returns: datetime: Valid date/time @@ -329,7 +329,7 @@ def get_lead_from_time_args(time_args: dict) -> timedelta: @staticmethod def _ensure_lead(issue: datetime, valid: datetime, - lead: Union[timedelta, TimeDelta]) -> TimeDelta: + lead: timedelta | TimeDelta) -> TimeDelta: if lead: if isinstance(lead, timedelta): return TimeDelta(lead) @@ -338,8 +338,8 @@ def _ensure_lead(issue: datetime, return TimeDelta(valid-issue) return None - def _parse_times(self, string: str, format_str: str) -> Dict: - def parse_args(key: str, value: str, result: Dict): + def _parse_times(self, string: str, format_str: str) -> dict: + def parse_args(key: str, value: str, result: dict): for arg in key.split('{')[1:]: var_name, var_size = arg.split(':') var_type = var_size[2:3] diff --git a/python/idsse_common/idsse/common/publish_confirm.py b/python/idsse_common/idsse/common/publish_confirm.py index b7ef4af..ea4dc64 100644 --- a/python/idsse_common/idsse/common/publish_confirm.py +++ b/python/idsse_common/idsse/common/publish_confirm.py @@ -17,10 +17,11 @@ 
import logging.config import json import time +from collections.abc import Callable from dataclasses import dataclass, field from random import randint from threading import Thread, Event -from typing import Optional, Dict, NamedTuple, Union, Callable, cast +from typing import NamedTuple, cast from pika import SelectConnection, BasicProperties from pika.channel import Channel @@ -37,12 +38,12 @@ class PublishConfirmRecords: """Data class to track RabbitMQ activity metadata Args: - deliveries (Dict[int, str]): mapping of delivered message IDs to message content + deliveries (dict[int, str]): mapping of delivered message IDs to message content acked (int): Count of acknowledged RabbitMQ messages nacked (int): Count of unacknowledged RabbitMQ messages message_number (int): The ID which will be assigned to the next published message """ - deliveries: Dict[int, str] = field(default_factory=dict) + deliveries: dict[int, str] = field(default_factory=dict) acked: int = 0 nacked: int = 0 message_number: int = 0 @@ -79,29 +80,29 @@ def __init__(self, conn: Conn, exchange: Exch, queue: Queue): daemon=True, target=self._run) - self._connection: Optional[SelectConnection] = None - self._channel: Optional[Channel] = None + self._connection: SelectConnection | None = None + self._channel: Channel | None = None self._stopping = False self._rmq_params = PublishConfirmParams(conn, exchange, queue) self._records = PublishConfirmRecords() # data class to track message activity - self._on_ready_callback: Optional[Callable[[], None]] = None + self._on_ready_callback: Callable[[], None] | None = None def publish_message(self, - message: Dict, + message: dict, routing_key='', - corr_id: Optional[str] = None) -> bool: + corr_id: str | None = None) -> bool: """If the class is not stopping, publish a message to RabbitMQ, appending a list of deliveries with the message number that was sent. This list will be used to check for delivery confirmations in the on_delivery_confirmations method. 
Args: - message (Dict): message to publish (should be valid json) + message (dict): message to publish (should be valid json) routing_key (str): routing_key to route the message to correct consumer. Default is empty str - corr_id (Optional[str]): optional correlation_id to include in message + corr_id (str | None): optional correlation_id to include in message Returns: bool: True if message successfully published to queue (channel was open and @@ -173,12 +174,12 @@ def _run(self): # Finish closing self._connection.ioloop.start() - def _start(self, callback: Optional[Callable[[], None]] = None): + def _start(self, callback: Callable[[], None] | None = None): """ Start a thread to handle PublishConfirm operations Args: - callback (Optional[Callable[[], None]]): callback function to be invoked + callback (Callable[[], None] | None): callback function to be invoked once instance is ready to publish messages (all RabbitMQ connection and channel are set up, delivery confirmation is enabled, etc.). Default to None. """ @@ -297,12 +298,12 @@ def _on_channel_closed(self, channel: Channel, reason: Exception): if not self._stopping: self._close_connection() - def _on_exchange_declareok(self, _unused_frame: Method, userdata: Union[str, bytes]): + def _on_exchange_declareok(self, _unused_frame: Method, userdata: str | bytes): """Invoked by pika when RabbitMQ has finished the Exchange.Declare RPC command. 
Args: _unused_frame (Frame.Method): Exchange.DeclareOk response frame - userdata (Union[str, bytes]): Extra user data (exchange name) + userdata (str | bytes): Extra user data (exchange name) """ logger.debug('Exchange declared: %s', userdata) @@ -374,7 +375,7 @@ def _on_delivery_confirmation(self, method_frame: Method): method_frame (Method): Basic.Ack or Basic.Nack frame """ # tell python type checker that method will be an Ack or Nack (per pika docs) - method = cast(Union[Basic.Ack, Basic.Nack], method_frame.method) + method = cast(Basic.Ack | Basic.Nack, method_frame.method) confirmation_type = method.NAME.split('.')[1].lower() ack_multiple = method.multiple diff --git a/python/idsse_common/idsse/common/rabbitmq_utils.py b/python/idsse_common/idsse/common/rabbitmq_utils.py index 7176722..b9e505d 100644 --- a/python/idsse_common/idsse/common/rabbitmq_utils.py +++ b/python/idsse_common/idsse/common/rabbitmq_utils.py @@ -13,10 +13,12 @@ import logging import logging.config -from typing import Callable, NamedTuple, Optional, Tuple, Union +from collections.abc import Callable +from typing import NamedTuple from pika import BasicProperties, ConnectionParameters, PlainCredentials -from pika.adapters import BlockingConnection, blocking_connection +from pika.adapters import BlockingConnection +from pika.adapters.blocking_connection import BlockingChannel from pika.frame import Method from pika.spec import Basic @@ -77,7 +79,7 @@ class RabbitMqParams(NamedTuple): def _initialize_exchange_and_queue( - channel: blocking_connection.BlockingChannel, + channel: BlockingChannel, params: RabbitMqParams ) -> str: """Declare and bind RabbitMQ exchange and queue using the provided channel. 
@@ -111,12 +113,12 @@ def _initialize_exchange_and_queue( def subscribe_to_queue( - connection: Union[Conn, BlockingConnection], + connection: Conn | BlockingConnection, params: RabbitMqParams, on_message_callback: Callable[ - [blocking_connection.BlockingChannel, Basic.Deliver, BasicProperties, bytes], None], - channel: Optional[blocking_connection.BlockingChannel] = None -) -> Tuple[BlockingConnection, blocking_connection.BlockingChannel]: + [BlockingChannel, Basic.Deliver, BasicProperties, bytes], None], + channel: BlockingChannel | None = None +) -> tuple[BlockingConnection, BlockingChannel]: """ Function that handles setup of consumer of RabbitMQ queue messages, declaring the exchange and queue if needed, and invoking the provided callback when a message is received. @@ -129,18 +131,18 @@ def subscribe_to_queue( close gracefully with connection.close() Args: - connection (Union[Conn, BlockingConnection]): connection parameters to establish new + connection (Conn | BlockingConnection): connection parameters to establish new RabbitMQ connection, or existing RabbitMQ connection to reuse for this consumer. params (RabbitMqParams): parameters for the RabbitMQ exchange and queue from which to consume messages. on_message_callback (Callable[ [BlockingChannel, Basic.Deliver, BasicProperties, bytes], None]): function to handle messages that are received over the subscribed exchange and queue. - channel (Optional[BlockingChannel]): optional existing (open) RabbitMQ channel to reuse. + channel (BlockingChannel | None): optional existing (open) RabbitMQ channel to reuse. Default is to create unique channel for this consumer. Returns: - Tuple[BlockingConnection, BlockingChannel]: the connection and channel, which are now open + tuple[BlockingConnection, BlockingChannel]: the connection and channel, which are now open and subscribed to the provided queue. 
""" if isinstance(connection, Conn): diff --git a/python/idsse_common/idsse/common/sci/geo_image.py b/python/idsse_common/idsse/common/sci/geo_image.py index 6d6755a..f63dfb8 100644 --- a/python/idsse_common/idsse/common/sci/geo_image.py +++ b/python/idsse_common/idsse/common/sci/geo_image.py @@ -49,9 +49,9 @@ class ColorPalette(NamedTuple): def linear( cls, colors: Sequence[Color], - anchors: Sequence[int] = None, - min_value: float = None, - max_value: float = None + anchors: Sequence[int] | None = None, + min_value: float | None = None, + max_value: float | None = None ) -> Self: """Create a color palette by linearly interpolating between colors @@ -177,9 +177,9 @@ def from_data_grid( # pylint: disable=too-many-arguments data_array: np.ndarray, colors: ColorPalette | None = None, scale: int = 1, - min_value: float = None, - max_value: float = None, - fill_value: float = None + min_value: float | None = None, + max_value: float | None = None, + fill_value: float | None = None ) -> Self: """Method for building a geographical image from data in a ndarray. @@ -191,11 +191,11 @@ def from_data_grid( # pylint: disable=too-many-arguments scale will be used. scale (int, optional): The height and width that a grid cell will be scaled to in the image. Defaults to 1. - min_value (float, optional): The minimum value used for normalizing the data. + min_value (float | None, optional): The minimum value used for normalizing the data. Default to None, in which case use the min(data). - max_value (float, optional): The maximum value used for normalizing the data. + max_value (float | None, optional): The maximum value used for normalizing the data. Default to None, in which case use the max(data). - fill_value (float, optional): If specified this value will not be normalized. + fill_value (float | None, optional): If specified this value will not be normalized. Default to None. 
Returns: @@ -464,23 +464,24 @@ def draw_state_boundary(self, state: str | Sequence[str], color: Color): def normalize( array: np.ndarray, - min_value: float = None, - max_value: float = None, - missing_value: float = None + min_value: float | None = None, + max_value: float | None = None, + missing_value: float | None = None ) -> np.ndarray | np.ma.MaskedArray: """Normalize a data array, map the values in array to between [0, 1] Args: array (np.ndarray): Input data array - min_value (float, optional): If provided, the minimum value to to be mapped. + min_value (float | None, optional): If provided, the minimum value to to be mapped. Will map to zero, and all values less will map to -1 (-1 is out of range, but will be masked). Defaults to None. - max_value (float, optional): If provided, the maximum value to to be mapped. + max_value (float | None, optional): If provided, the maximum value to to be mapped. Will map to 1, and all values greater will map to 2 (2 is out of range, but will be masked). Defaults to None. - missing_value (float, optional): If provided, the value representing fill, which will not - be mapped to [0, 1]. Will map to np.nan, (NaN is not a valid - float between [0, 1], but will be masked). Defaults to None. + missing_value (float | None, optional): If provided, the value representing fill, which + will not be mapped to [0, 1]. Will map to np.nan, (NaN is not + a valid float between [0, 1], but will be masked). + Defaults to None. 
Returns: np.ndarray | np.ma.MaskedArray: If all data is mapped to [0, 1] the returned array will be diff --git a/python/idsse_common/idsse/common/sci/grid_proj.py b/python/idsse_common/idsse/common/sci/grid_proj.py index c52474a..5859c82 100644 --- a/python/idsse_common/idsse/common/sci/grid_proj.py +++ b/python/idsse_common/idsse/common/sci/grid_proj.py @@ -13,8 +13,9 @@ # pylint: disable=invalid-name # cspell:word fliplr, flipud -from typing import Self, Tuple, Union, Optional, Sequence, TypeVar, Iterable +from collections.abc import Sequence from enum import Enum +from typing import Self, TypeVar, Iterable import numpy as np from pyproj import CRS, Transformer @@ -24,11 +25,11 @@ from idsse.common.sci.utils import coordinate_pairs_to_axes # type hints -Scalar = Union[int, float, np.integer, np.float_] -ScalarPair = Tuple[Scalar, Scalar] +Scalar = int | float | np.integer | np.float_ +ScalarPair = tuple[Scalar, Scalar] ScalarArray = Sequence[Scalar] -Coordinate = Union[Scalar, ScalarPair, ScalarArray, np.ndarray] -CoordinatePair = Tuple[Coordinate, Coordinate] +Coordinate = Scalar | ScalarPair | ScalarArray | np.ndarray +CoordinatePair = tuple[Coordinate, Coordinate] # variables passed to GridProj.map_* methods can be anything in this list, but # method will always preserve the argument's type in the return value @@ -49,12 +50,12 @@ class GridProj: """ def __init__(self, crs: CRS, - lower_left_lat: Optional[float], - lower_left_lon: Optional[float], + lower_left_lat: float | None, + lower_left_lon: float | None, width: float, height: float, dx: float, - dy: Optional[float] = None): + dy: float | None = None): # pylint: disable=too-many-arguments,unpacking-non-sequence self._trans = Transformer.from_crs(crs.geodetic_crs, crs) self._x_offset = 0.0 @@ -136,9 +137,9 @@ def _transform( self, xx: T, yy: T, - direction: Union[TransformDirection, str] = TransformDirection.FORWARD - ) -> Tuple[T, T]: - """Transform any x coordinate/array and y coordinate/array to a Tuple 
of the same types, + direction: TransformDirection | str = TransformDirection.FORWARD + ) -> tuple[T, T]: + """Transform any x coordinate/array and y coordinate/array to a tuple of the same types, converted into the GridProj's coordination system. Wrapper for Transformer.transform() with more specific type hinting than pyproj (Any) @@ -149,15 +150,15 @@ def map_geo_to_pixel( self, lon: T, lat: T, - rounding: Optional[RoundingParam] = None, + rounding: RoundingParam | None = None, precision: int = 0 - ) -> Tuple[T, T]: + ) -> tuple[T, T]: """Map geographic coordinates to a pixel. Args: lon (T): single x geographic coordinate, or array of all x coordinates lat (T): single y geographic coordinate, or array of all y coordinates - rounding (Optional[RoundingParam]): + rounding ([RoundingParam | None]): ROUND to apply round_() to pixel values, FLOOR to apply math.floor(). Supports RoundingMethod enum value or str value (case insensitive). @@ -177,7 +178,7 @@ def map_geo_to_pixel( precision ) - def map_pixel_to_geo(self, x: T, y: T) -> Tuple[T, T]: + def map_pixel_to_geo(self, x: T, y: T) -> tuple[T, T]: """Map one or more pixel(s) x,y to a projection Args: @@ -185,13 +186,13 @@ def map_pixel_to_geo(self, x: T, y: T) -> Tuple[T, T]: y (T): y coordinate (or array) in pixel space Returns: - Tuple[T, T]: Single geographic coordinate as lon,lat, or + tuple[T, T]: Single geographic coordinate as lon,lat, or entire array of lat,lon pairs if arrays were passed """ crs_coordinates = self.map_pixel_to_crs(x, y) return self.map_crs_to_geo(*crs_coordinates) - def map_geo_to_crs(self, lon: T, lat: T) -> Tuple[T, T]: + def map_geo_to_crs(self, lon: T, lat: T) -> tuple[T, T]: """Map geographic coordinate (lon, lat), or array of longitudes and latitudes, to CRS Args: @@ -199,11 +200,11 @@ def map_geo_to_crs(self, lon: T, lat: T) -> Tuple[T, T]: lat (T): y geographic coordinate Returns: - Tuple[T, T]: Coordinate Reference System + tuple[T, T]: Coordinate Reference System """ return 
self._transform(lon, lat) - def map_pixel_to_crs(self, x: T, y: T) -> Tuple[T, T]: + def map_pixel_to_crs(self, x: T, y: T) -> tuple[T, T]: """Map pixel space (x,y) to Coordinate Reference System Args: @@ -211,7 +212,7 @@ def map_pixel_to_crs(self, x: T, y: T) -> Tuple[T, T]: y (T): y coordinate, or array of coordinates, in pixel space Returns: - Tuple[T, T]: Coordinate Reference System x and y pair (or pair of arrays) + tuple[T, T]: Coordinate Reference System x and y pair (or pair of arrays) """ if isinstance(x, Scalar) and isinstance(y, Scalar): # single x, y Pixel (base case) @@ -232,7 +233,7 @@ def map_pixel_to_crs(self, x: T, y: T) -> Tuple[T, T]: f'Cannot transpose pixel values of ({type(x).__name__})({type(y).__name__}) to CRS' ) - def map_crs_to_geo(self, x: T, y: T) -> Tuple[T, T]: + def map_crs_to_geo(self, x: T, y: T) -> tuple[T, T]: """Map Coordinate Reference System (x,y) to Geographical space (lon,lat) Args: @@ -240,7 +241,7 @@ def map_crs_to_geo(self, x: T, y: T) -> Tuple[T, T]: y (T): y coordinate, or array of coordinates, in CRS space Returns: - Tuple[T, T]: Geographic coordinate as lon,lat + tuple[T, T]: Geographic coordinate as lon,lat """ return self._transform(x, y, direction=TransformDirection.INVERSE) @@ -248,15 +249,15 @@ def map_crs_to_pixel( self, x: T, y: T, - rounding: Optional[RoundingParam] = None, + rounding: RoundingParam | None = None, precision: int = 0, - ) -> Tuple[T, T]: + ) -> tuple[T, T]: """Map Coordinate Reference System (x,y) coordinates to pixel x and y Args: x (T): x scalar, or array/list of x scalars, in CRS dimensions y (T): y scalar, or array/list of y scalars, in CRS dimensions - rounding (Optional[RoundingParam]): + rounding ([RoundingParam | None]): ROUND to apply round_() to pixel values, FLOOR to apply math.floor(). Supports RoundingMethod enum value or str value (case insensitive). 
diff --git a/python/idsse_common/idsse/common/sci/netcdf_io.py b/python/idsse_common/idsse/common/sci/netcdf_io.py index a6aaa67..ffcf763 100644 --- a/python/idsse_common/idsse/common/sci/netcdf_io.py +++ b/python/idsse_common/idsse/common/sci/netcdf_io.py @@ -13,7 +13,8 @@ import logging import os -from typing import List, Protocol, Tuple +from collections.abc import Sequence +from typing import Protocol from netCDF4 import Dataset # pylint: disable=no-name-in-module import h5netcdf as h5nc @@ -25,11 +26,11 @@ # cSpell:ignore ncattrs, getncattr, maskandscale class HasNcAttr(Protocol): """Protocol that allows retrieving attributes""" - def ncattrs(self) -> List[str]: + def ncattrs(self) -> Sequence[str]: """Gives access to list of keys Returns: - List[str]: Keys names for the attributes + Sequence[str]: Keys names for the attributes """ def getncattr(self, key: str) -> any: @@ -55,7 +56,7 @@ def read_netcdf_global_attrs(filepath: str) -> dict: return _read_attrs(Dataset(filepath)) -def read_netcdf(filepath: str, use_h5_lib = False) -> Tuple[dict, np.ndarray]: +def read_netcdf(filepath: str, use_h5_lib = False) -> tuple[dict, np.ndarray]: """Reads DAS Netcdf file. Args: @@ -64,7 +65,7 @@ def read_netcdf(filepath: str, use_h5_lib = False) -> Tuple[dict, np.ndarray]: If False, netCDF4 library will be used. Default is False (netcdf4 will be used). 
Returns: - Tuple[dict, np.ndarray]: Global attributes and data + tuple[dict, np.ndarray]: Global attributes and data """ if use_h5_lib: with h5nc.File(filepath, 'r') as nc_file: diff --git a/python/idsse_common/idsse/common/sci/utils.py b/python/idsse_common/idsse/common/sci/utils.py index 0d3b7f8..29a9675 100644 --- a/python/idsse_common/idsse/common/sci/utils.py +++ b/python/idsse_common/idsse/common/sci/utils.py @@ -11,7 +11,8 @@ # ------------------------------------------------------------------------------------ import logging -from typing import NewType, Sequence +from collections.abc import Sequence +from typing import NewType import numpy # import shapely diff --git a/python/idsse_common/idsse/common/utils.py b/python/idsse_common/idsse/common/utils.py index e1d4fd9..ea12da6 100644 --- a/python/idsse_common/idsse/common/utils.py +++ b/python/idsse_common/idsse/common/utils.py @@ -12,10 +12,11 @@ import copy import logging import math +from collections.abc import Sequence from datetime import datetime, timedelta, timezone from enum import Enum from subprocess import PIPE, Popen, TimeoutExpired -from typing import Any, Generator, Optional, Sequence, Union +from typing import Any, Generator from uuid import UUID logger = logging.getLogger(__name__) @@ -27,7 +28,7 @@ class RoundingMethod(Enum): FLOOR = 'FLOOR' -RoundingParam = Union[str, RoundingMethod] +RoundingParam = str | RoundingMethod class TimeDelta: @@ -84,7 +85,7 @@ def __delitem__(self, key): del self.__dict__[key] -def exec_cmd(commands: Sequence[str], timeout: Optional[int] = None) -> Sequence[str]: +def exec_cmd(commands: Sequence[str], timeout: int | None = None) -> Sequence[str]: """Execute the passed commands via a Popen call Args: @@ -161,7 +162,7 @@ def dict_copy_with(old_dict: dict, **kwargs) -> dict: def datetime_gen(dt_start: datetime, time_delta: timedelta, - dt_end: Optional[datetime] = None, + dt_end: datetime | None = None, max_num: int = 100) -> Generator[datetime, Any, None]: 
"""Create a date/time sequence generator, given a starting date/time and a time stride @@ -169,7 +170,7 @@ def datetime_gen(dt_start: datetime, dt_start (datetime): Starting date/time, will be the first date/time made available time_delta (timedelta): Time delta, can be either positive or negative. The sign of this will be switch based on the order of start_dt and end_dt. - dt_end (datetime, optional): Ending date/time, will be the last, unless generation is + dt_end (datetime | None, optional): Ending date/time, will be the last, unless generation is halted by max_num. Defaults to None. max_num (int, optional): Max number of date/times that generator will return. Defaults to 100. diff --git a/python/idsse_common/idsse/common/validate_schema.py b/python/idsse_common/idsse/common/validate_schema.py index 35d236e..7404b6e 100644 --- a/python/idsse_common/idsse/common/validate_schema.py +++ b/python/idsse_common/idsse/common/validate_schema.py @@ -10,7 +10,6 @@ # ---------------------------------------------------------------------------------- import json import os -from typing import Optional, Union from jsonschema import Draft202012Validator, FormatChecker from jsonschema.protocols import Validator @@ -22,7 +21,7 @@ _draft = DRAFT202012 -def _get_refs(json_obj: Union[dict, list], result: Optional[set] = None) -> set: +def _get_refs(json_obj: dict | list, result: set | None = None) -> set: if result is None: result = set() if isinstance(json_obj, dict): diff --git a/python/idsse_common/test/sci/test_grid_proj.py b/python/idsse_common/test/sci/test_grid_proj.py index e551b72..aa5d602 100644 --- a/python/idsse_common/test/sci/test_grid_proj.py +++ b/python/idsse_common/test/sci/test_grid_proj.py @@ -13,7 +13,7 @@ # pylint: disable=missing-function-docstring,redefined-outer-name,invalid-name,protected-access # cspell:ignore pyproj -from typing import Tuple, List +from collections.abc import Sequence import numpy as np from pytest import approx, fixture, raises @@ -35,7 
+35,7 @@ WIDTH = 2345 HEIGHT = 1597 -EXAMPLE_PIXELS: List[Tuple[int, int]] = [ +EXAMPLE_PIXELS: Sequence[tuple[int, int]] = [ (0, 0), (0, 1), (2000, 1500) @@ -53,7 +53,7 @@ # utility to roughly compare tuples of floats with less floating point precision -def approx_tuple(values: Tuple[float, float]) -> Tuple: +def approx_tuple(values: tuple[float, float]) -> tuple: return (approx(values[0]), approx(values[1])) diff --git a/python/idsse_common/test/sci/test_netcdf_io.py b/python/idsse_common/test/sci/test_netcdf_io.py index 2eced85..9c35edb 100644 --- a/python/idsse_common/test/sci/test_netcdf_io.py +++ b/python/idsse_common/test/sci/test_netcdf_io.py @@ -11,7 +11,6 @@ # pylint: disable=missing-function-docstring,redefined-outer-name,protected-access,unused-argument import os -from typing import Dict, Tuple from pytest import fixture, approx from numpy import ndarray @@ -46,7 +45,7 @@ # pytest fixtures @fixture -def example_netcdf_data() -> Tuple[Dict[str, any], ndarray]: +def example_netcdf_data() -> tuple[dict[str, any], ndarray]: return read_netcdf(EXAMPLE_NETCDF_FILEPATH) @@ -58,7 +57,7 @@ def test_read_netcdf_global_attrs(): assert attrs == EXAMPLE_ATTRIBUTES -def test_read_netcdf(example_netcdf_data: Tuple[Dict[str, any], ndarray]): +def test_read_netcdf(example_netcdf_data: tuple[dict[str, any], ndarray]): attrs, grid = example_netcdf_data assert grid.shape == (1597, 2345) @@ -71,7 +70,7 @@ def test_read_netcdf(example_netcdf_data: Tuple[Dict[str, any], ndarray]): assert attrs == EXAMPLE_ATTRIBUTES -def test_read_and_write_netcdf(example_netcdf_data: Tuple[Dict[str, any], ndarray]): +def test_read_and_write_netcdf(example_netcdf_data: tuple[dict[str, any], ndarray]): # cleanup existing test file if needed temp_netcdf_filepath = './tmp/test_netcdf_file.nc' if os.path.exists(temp_netcdf_filepath): @@ -94,7 +93,7 @@ def test_read_and_write_netcdf(example_netcdf_data: Tuple[Dict[str, any], ndarra os.remove(temp_netcdf_filepath) -def 
test_read_and_write_netcdf_with_h5nc(example_netcdf_data: Tuple[Dict[str, any], ndarray]): +def test_read_and_write_netcdf_with_h5nc(example_netcdf_data: tuple[dict[str, any], ndarray]): # create h5nc file temp_netcdf_h5_filepath = './tmp/test_netcdf_h5_file.nc' if os.path.exists(temp_netcdf_h5_filepath): diff --git a/python/idsse_common/test/test_aws_utils.py b/python/idsse_common/test/test_aws_utils.py index 8cc6631..caf8582 100644 --- a/python/idsse_common/test/test_aws_utils.py +++ b/python/idsse_common/test/test_aws_utils.py @@ -13,8 +13,8 @@ # pylint: disable=missing-function-docstring,redefined-outer-name,pointless-statement # pylint: disable=invalid-name,unused-argument +from collections.abc import Iterable, Sequence from datetime import datetime, timedelta, UTC -from typing import List from unittest.mock import Mock from pytest import fixture, MonkeyPatch @@ -55,7 +55,7 @@ def aws_utils_with_wild() -> AwsUtils: @fixture def mock_exec_cmd(monkeypatch: MonkeyPatch) -> Mock: - def get_files_for_dir(args: List[str]) -> List[str]: + def get_files_for_dir(args: Iterable[str]) -> Sequence[str]: hour = args[-1].split('/')[-3] return [f'blend.t{hour}z.core.f002.co.grib2', f'blend.t{hour}z.core.f003.co.grib2', diff --git a/python/idsse_common/test/test_publish_confirm.py b/python/idsse_common/test/test_publish_confirm.py index 7dbc9ea..e9262e9 100644 --- a/python/idsse_common/test/test_publish_confirm.py +++ b/python/idsse_common/test/test_publish_confirm.py @@ -13,7 +13,8 @@ # pylint: disable=too-few-public-methods,unused-argument from time import sleep -from typing import Callable, Union, Any, NamedTuple, Self +from collections.abc import Callable +from typing import Any, NamedTuple, Self from unittest.mock import Mock from pytest import fixture, raises, MonkeyPatch @@ -29,7 +30,7 @@ class Method(NamedTuple): """mock of pika.frame.Method data class""" - method: Union[Basic.Ack, Basic.Nack] + method: Basic.Ack | Basic.Nack class MockPika: