diff --git a/disdrodb/api/checks.py b/disdrodb/api/checks.py
index f5ff494c..b79b38f9 100644
--- a/disdrodb/api/checks.py
+++ b/disdrodb/api/checks.py
@@ -73,8 +73,7 @@ def check_url(url: str) -> bool:
 
     if re.match(regex, url):
         return True
-    else:
-        return False
+    return False
 
 
 def check_path_is_a_directory(dir_path, path_name=""):
diff --git a/disdrodb/api/io.py b/disdrodb/api/io.py
index 0b115a99..4409771c 100644
--- a/disdrodb/api/io.py
+++ b/disdrodb/api/io.py
@@ -270,10 +270,9 @@ def available_campaigns(product, data_sources=None, return_tuple=True, base_dir=
         campaigns = [info[1] for info in list_available_stations]
         campaigns = np.unique(campaigns).tolist()
         return campaigns
-    else:
-        data_source_campaigns = [(info[0], info[1]) for info in list_available_stations]
-        data_source_campaigns = list(set(data_source_campaigns))
-        return data_source_campaigns
+    data_source_campaigns = [(info[0], info[1]) for info in list_available_stations]
+    data_source_campaigns = list(set(data_source_campaigns))
+    return data_source_campaigns
 
 
 def available_stations(
@@ -320,7 +319,7 @@ def available_stations(
 
     if return_tuple:
         return list_info
-    else:
-        # TODO: ENSURE THAT NO DUPLICATED STATION NAMES ?
-        list_stations = [info[2] for info in list_info]
-        return list_stations
+
+    # TODO: ENSURE THAT NO DUPLICATED STATION NAMES ?
+    list_stations = [info[2] for info in list_info]
+    return list_stations
diff --git a/disdrodb/data_transfer/download_data.py b/disdrodb/data_transfer/download_data.py
index 479df5cc..e358524a 100644
--- a/disdrodb/data_transfer/download_data.py
+++ b/disdrodb/data_transfer/download_data.py
@@ -141,7 +141,7 @@ def download_archive(
     metadata_filepaths = _select_metadata_with_remote_data_url(metadata_filepaths)
     if len(metadata_filepaths) == 0:
         print("No available remote data to download.")
-        return None
+        return
 
     # Try to download the data
     # - It will download data only if the disdrodb_data_url is specified !
@@ -211,8 +211,7 @@ def _is_valid_disdrodb_data_url(disdrodb_data_url):
     """Check if it is a valid disdrodb_data_url."""
     if isinstance(disdrodb_data_url, str) and len(disdrodb_data_url) > 10:
         return True
-    else:
-        return False
+    return False
 
 
 def _has_disdrodb_data_url(metadata_filepath):
diff --git a/disdrodb/l0/check_configs.py b/disdrodb/l0/check_configs.py
index 204a63a2..5446b95c 100644
--- a/disdrodb/l0/check_configs.py
+++ b/disdrodb/l0/check_configs.py
@@ -225,6 +225,7 @@ def check_list_length(cls, value):
             if len(value) != 2:
                 raise ValueError(f"data_range must have exactly 2 keys, {len(value)} element have been provided.")
             return value
+        return None
 
 
 def _check_raw_data_format(sensor_name: str) -> None:
diff --git a/disdrodb/l0/l0_processing.py b/disdrodb/l0/l0_processing.py
index 2d879560..731ea418 100644
--- a/disdrodb/l0/l0_processing.py
+++ b/disdrodb/l0/l0_processing.py
@@ -567,7 +567,7 @@ def run_l0b(
 
     # Skip run_l0b processing if the raw data are netCDFs
     if attrs["raw_data_format"] == "netcdf":
-        return None
+        return
 
     # -----------------------------------------------------------------.
     # Start L0B processing
@@ -635,7 +635,7 @@ def run_l0b(
     timedelta_str = str(datetime.timedelta(seconds=time.time() - t_i))
     msg = f"L0B processing of station_name {station_name} completed in {timedelta_str}"
     log_info(logger=logger, msg=msg, verbose=verbose)
-    return None
+    return
 
 
 def run_l0b_from_nc(
diff --git a/disdrodb/l0/l0a_processing.py b/disdrodb/l0/l0a_processing.py
index a5139058..210a605a 100644
--- a/disdrodb/l0/l0a_processing.py
+++ b/disdrodb/l0/l0a_processing.py
@@ -131,7 +131,7 @@ def read_raw_file(
 def _check_df_sanitizer_fun(df_sanitizer_fun):
     """Check that ``df`` is the only argument of ``df_sanitizer_fun``."""
     if df_sanitizer_fun is None:
-        return None
+        return
     if not callable(df_sanitizer_fun):
         raise ValueError("'df_sanitizer_fun' must be a function.")
     if not np.all(np.isin(inspect.getfullargspec(df_sanitizer_fun).args, ["df"])):
@@ -488,7 +488,7 @@ def replace_nan_flags(df, sensor_name, verbose=False):
         if n_nan_flags_values > 0:
             msg = f"In variable {var}, {n_nan_flags_values} values were nan_flags and were replaced to np.nan."
             log_info(logger=logger, msg=msg, verbose=verbose)
-            df[var][is_a_nan_flag] = np.nan
+            df.loc[is_a_nan_flag, var] = np.nan
 
     # Return dataframe
     return df
diff --git a/disdrodb/l0/l0b_nc_processing.py b/disdrodb/l0/l0b_nc_processing.py
index dd0abd72..132623ba 100644
--- a/disdrodb/l0/l0b_nc_processing.py
+++ b/disdrodb/l0/l0b_nc_processing.py
@@ -127,7 +127,7 @@ def add_dataset_missing_variables(ds, missing_vars, sensor_name):
         # Get variable dimension
         dims = var_dims_dict[var]
         # Retrieve expected shape
-        expected_shape = [ds.dims[dim] for dim in dims]
+        expected_shape = [ds.sizes[dim] for dim in dims]
         # Create DataArray
         arr = np.zeros(expected_shape) * np.nan
         da = xr.DataArray(arr, dims=dims)
diff --git a/disdrodb/l0/template_tools.py b/disdrodb/l0/template_tools.py
index 71637df8..21fbab95 100644
--- a/disdrodb/l0/template_tools.py
+++ b/disdrodb/l0/template_tools.py
@@ -329,8 +329,7 @@ def str_has_decimal_digits(string: str) -> bool:
     """
     if len(string.split(".")) == 2:
         return True
-    else:
-        return False
+    return False
 
 
 def get_decimal_ndigits(string: str) -> int:
@@ -348,8 +347,7 @@ def get_decimal_ndigits(string: str) -> int:
     """
     if str_has_decimal_digits(string):
         return len(string.split(".")[1])
-    else:
-        return 0
+    return 0
 
 
 def get_natural_ndigits(string: str) -> int:
@@ -369,8 +367,7 @@ def get_natural_ndigits(string: str) -> int:
         return len(string.replace("-", ""))
     if str_has_decimal_digits(string):
         return len(string.split(".")[0].replace("-", ""))
-    else:
-        return 0
+    return 0
 
 
 def get_ndigits(string: str) -> int:
@@ -392,8 +389,7 @@ def get_ndigits(string: str) -> int:
     string = string.replace("-", "")
     if str_has_decimal_digits(string):
         return len(string) - 1  # remove .
-    else:
-        return len(string)
+    return len(string)
 
 
 def get_nchar(string: str) -> int:
diff --git a/disdrodb/metadata/checks.py b/disdrodb/metadata/checks.py
index 0be7b113..0dc05f40 100644
--- a/disdrodb/metadata/checks.py
+++ b/disdrodb/metadata/checks.py
@@ -169,12 +169,10 @@ def _check_lonlat_type(longitude, latitude):
 def _check_lonlat_validity(longitude, latitude):
     if longitude == -9999 or latitude == -9999:
         raise ValueError("Missing lat lon coordinates (-9999).")
-    elif longitude > 180 or longitude < -180:
+    if longitude > 180 or longitude < -180:
         raise ValueError("Invalid longitude (outside [-180, 180])")
-    elif latitude > 90 or latitude < -90:
+    if latitude > 90 or latitude < -90:
         raise ValueError("Invalid latitude (outside [-90, 90])")
-    else:
-        pass
 
 
 def check_metadata_geolocation(metadata) -> None:
@@ -550,8 +548,7 @@ def check_archive_metadata_compliance(base_dir: Optional[str] = None, raise_erro
             msg = msg + f"The error is: {e}."
             if raise_error:
                 raise ValueError(msg)
-            else:
-                print(msg)
+            print(msg)
     return is_valid
diff --git a/disdrodb/tests/test_l0/test_config_files.py b/disdrodb/tests/test_l0/test_config_files.py
index c0a9be30..200ba62a 100644
--- a/disdrodb/tests/test_l0/test_config_files.py
+++ b/disdrodb/tests/test_l0/test_config_files.py
@@ -90,10 +90,8 @@ def is_sorted_int_keys(obj: list) -> bool:
     if isinstance(obj, list):
         if len(obj) == 0:
             return True
-        else:
-            return all(isinstance(x, int) for x in obj) and obj == sorted(obj)
-    else:
-        return False
+        return all(isinstance(x, int) for x in obj) and obj == sorted(obj)
+    return False
 
 
 def is_numeric_list(obj: list) -> bool:
@@ -112,10 +110,8 @@ def is_numeric_list(obj: list) -> bool:
     if isinstance(obj, list):
         if len(obj) == 0:
             return True
-        else:
-            return all(isinstance(x, (int, float)) for x in obj)
-    else:
-        return False
+        return all(isinstance(x, (int, float)) for x in obj)
+    return False
 
 
 def is_string_list(obj: list) -> bool:
@@ -134,10 +130,8 @@ def is_string_list(obj: list) -> bool:
     if isinstance(obj, list):
         if len(obj) == 0:
             return True
-        else:
-            return all(isinstance(x, str) for x in obj)
-    else:
-        return False
+        return all(isinstance(x, str) for x in obj)
+    return False
 
 
 def validate_schema_pytest(schema_to_validate: Union[str, list], schema: BaseModel) -> bool:
diff --git a/disdrodb/tests/test_l0/test_l0a_processing.py b/disdrodb/tests/test_l0/test_l0a_processing.py
index de144628..ac490d80 100644
--- a/disdrodb/tests/test_l0/test_l0a_processing.py
+++ b/disdrodb/tests/test_l0/test_l0a_processing.py
@@ -597,10 +597,10 @@ def test_read_raw_files():
 
     # Create a test dataframe
     df1 = pd.DataFrame(
-        {"time": pd.date_range(start="2022-01-01", end="2022-01-02", freq="H"), "value": np.random.rand(25)},
+        {"time": pd.date_range(start="2022-01-01", end="2022-01-02", freq="h"), "value": np.random.rand(25)},
     )
     df2 = pd.DataFrame(
-        {"time": pd.date_range(start="2022-01-03", end="2022-01-04", freq="H"), "value": np.random.rand(25)},
+        {"time": pd.date_range(start="2022-01-03", end="2022-01-04", freq="h"), "value": np.random.rand(25)},
     )
     df_list = [df1, df2]
 
@@ -620,8 +620,9 @@ def test_read_raw_files():
     def mock_process_raw_file(filepath, column_names, reader_kwargs, df_sanitizer_fun, sensor_name, verbose):
         if filepath == "test_file1.csv":
             return df1
-        elif filepath == "test_file2.csv":
+        if filepath == "test_file2.csv":
             return df2
+        return None
 
     # Monkey patch the function
     l0a_processing.process_raw_file = mock_process_raw_file
diff --git a/disdrodb/tests/test_l0/test_l0b_processing.py b/disdrodb/tests/test_l0/test_l0b_processing.py
index cd5b20aa..75da8755 100644
--- a/disdrodb/tests/test_l0/test_l0b_processing.py
+++ b/disdrodb/tests/test_l0/test_l0b_processing.py
@@ -97,7 +97,7 @@ def define_test_dummy_configs():
 def test_create_l0b_from_l0a(create_test_config_files):
     # Create a sample DataFrame
     df = pd.DataFrame({
-        "time": pd.date_range("2022-01-01", periods=10, freq="H"),
+        "time": pd.date_range("2022-01-01", periods=10, freq="h"),
         "raw_drop_concentration": np.random.rand(10),
         "raw_drop_average_velocity": np.random.rand(10),
         "raw_drop_number": np.random.rand(10),
diff --git a/disdrodb/utils/compression.py b/disdrodb/utils/compression.py
index ef08ba7f..7f2a1dd5 100644
--- a/disdrodb/utils/compression.py
+++ b/disdrodb/utils/compression.py
@@ -169,8 +169,7 @@ def _compress_file(filepath: str, method: str, skip: bool) -> str:
         if skip:
             print(f"File {filepath} is already compressed. Skipping.")
             return filepath
-        else:
-            raise ValueError(f"File {filepath} is already compressed !")
+        raise ValueError(f"File {filepath} is already compressed !")
 
     extension = COMPRESSION_OPTIONS[method]
     archive_name = os.path.basename(filepath) + extension
diff --git a/disdrodb/utils/directories.py b/disdrodb/utils/directories.py
index 79f03bcb..43fefc1b 100644
--- a/disdrodb/utils/directories.py
+++ b/disdrodb/utils/directories.py
@@ -51,8 +51,7 @@ def list_paths(dir_path, glob_pattern, recursive=False):
     """Return a list of filepaths and directory paths."""
     if not recursive:
         return glob.glob(os.path.join(dir_path, glob_pattern))
-    else:
-        return _recursive_glob(dir_path, glob_pattern)
+    return _recursive_glob(dir_path, glob_pattern)
 
 
 def list_files(dir_path, glob_pattern, recursive=False):
@@ -125,8 +124,7 @@ def is_empty_directory(path):
     paths = os.listdir(path)
     if len(paths) == 0:
         return True
-    else:
-        return False
+    return False
 
 
 def _remove_file_or_directories(path):
@@ -155,7 +153,7 @@ def remove_if_exists(path: str, force: bool = False) -> None:
     """
     # If the path does not exist, do nothing
     if not os.path.exists(path):
-        return None
+        return
 
     # If the path exists and force=False, raise Error
     if not force:
diff --git a/disdrodb/utils/logger.py b/disdrodb/utils/logger.py
index 35d06fcb..4be185e2 100644
--- a/disdrodb/utils/logger.py
+++ b/disdrodb/utils/logger.py
@@ -200,7 +200,7 @@ def define_summary_log(list_logs):
     """
     # LogCaptureHandler of pytest does not have baseFilename attribute, so it returns None
     if list_logs[0] is None:
-        return None
+        return
 
     station_name, logs_dir = _get_logs_dir(list_logs)
diff --git a/pyproject.toml b/pyproject.toml
index 50a5a8df..6d64a7ce 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -153,7 +153,7 @@ select = [
     # flake8-bugbear
     # "B",
     # flake8-return
-    # "RET",
+    "RET",
    # flake8-unused-arguments
     # "ARG",
     # flake8-raise
@@ -192,6 +192,7 @@ ignore = [
     "PERF203",
     "PLW2901",
     "B904",
+    "RET504",
     # Docstyle Rules
     "D404",  # Docstring can't start with "This"