
Commit

Add RET rules
ghiggi committed Apr 2, 2024
1 parent bd70631 commit 165e1fc
Showing 16 changed files with 41 additions and 57 deletions.
3 changes: 1 addition & 2 deletions disdrodb/api/checks.py
@@ -73,8 +73,7 @@ def check_url(url: str) -> bool:

if re.match(regex, url):
return True
-    else:
-        return False
+    return False


def check_path_is_a_directory(dir_path, path_name=""):
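The hunk above shows the refactor this commit applies across most of the touched files: ruff's flake8-return rule RET505 flags an else branch that directly follows a return, so the else is dropped and its body dedented. A minimal before/after sketch, illustrative only and using a placeholder regex rather than the project's URL pattern:

# Illustrative sketch (not repository code): the RET505 "superfluous else after return" pattern.
import re

URL_REGEX = r"^https?://\S+$"  # placeholder pattern for illustration


def check_url_before(url: str) -> bool:
    """Pre-commit shape: RET505 flags the else after return."""
    if re.match(URL_REGEX, url):
        return True
    else:
        return False


def check_url_after(url: str) -> bool:
    """Post-commit shape: the early return makes the else unnecessary."""
    if re.match(URL_REGEX, url):
        return True
    return False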
15 changes: 7 additions & 8 deletions disdrodb/api/io.py
@@ -270,10 +270,9 @@ def available_campaigns(product, data_sources=None, return_tuple=True, base_dir=
campaigns = [info[1] for info in list_available_stations]
campaigns = np.unique(campaigns).tolist()
return campaigns
-    else:
-        data_source_campaigns = [(info[0], info[1]) for info in list_available_stations]
-        data_source_campaigns = list(set(data_source_campaigns))
-        return data_source_campaigns
+    data_source_campaigns = [(info[0], info[1]) for info in list_available_stations]
+    data_source_campaigns = list(set(data_source_campaigns))
+    return data_source_campaigns

Codecov / codecov/patch annotation on disdrodb/api/io.py#L273-L275: added lines #L273-L275 were not covered by tests.

def available_stations(
@@ -320,7 +319,7 @@ def available_stations(

if return_tuple:
return list_info
-    else:
-        # TODO: ENSURE THAT NO DUPLICATED STATION NAMES ?
-        list_stations = [info[2] for info in list_info]
-        return list_stations
+
+    # TODO: ENSURE THAT NO DUPLICATED STATION NAMES ?
+    list_stations = [info[2] for info in list_info]
+    return list_stations

Codecov / codecov/patch annotation on disdrodb/api/io.py#L324-L325: added lines #L324-L325 were not covered by tests.
5 changes: 2 additions & 3 deletions disdrodb/data_transfer/download_data.py
@@ -141,7 +141,7 @@ def download_archive(
metadata_filepaths = _select_metadata_with_remote_data_url(metadata_filepaths)
if len(metadata_filepaths) == 0:
print("No available remote data to download.")
-        return None
+        return

# Try to download the data
# - It will download data only if the disdrodb_data_url is specified !
@@ -211,8 +211,7 @@ def _is_valid_disdrodb_data_url(disdrodb_data_url):
"""Check if it is a valid disdrodb_data_url."""
if isinstance(disdrodb_data_url, str) and len(disdrodb_data_url) > 10:
return True
-    else:
-        return False
+    return False


def _has_disdrodb_data_url(metadata_filepath):
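The dropped return None lines here (and in several later files) are the companion fix, RET501 (unnecessary return None): when a function never returns a meaningful value, a bare return is preferred for the early exit. A small illustrative sketch, not project code:

# Illustrative sketch (not project code): RET501 prefers a bare return in a
# function that never returns a value.


def report_downloads(n_files: int) -> None:
    if n_files == 0:
        print("No available remote data to download.")
        return  # bare early exit; an explicit "return None" is what RET501 flags
    print(f"Downloading {n_files} file(s)...")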
1 change: 1 addition & 0 deletions disdrodb/l0/check_configs.py
@@ -225,6 +225,7 @@ def check_list_length(cls, value):
if len(value) != 2:
raise ValueError(f"data_range must have exactly 2 keys, {len(value)} element have been provided.")
return value
+        return None


def _check_raw_data_format(sensor_name: str) -> None:
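The single added line goes in the other direction: when a function returns a value on some paths, RET503 (implicit return) asks the fall-through path to state return None explicitly instead of falling off the end. A generic sketch of the pattern, with a hypothetical validator name rather than the project's:

# Generic sketch of the RET503 fix (hypothetical function, not the project's validator).


def validate_data_range(value):
    if value is not None:
        if len(value) != 2:
            raise ValueError(f"data_range must have exactly 2 keys, {len(value)} elements have been provided.")
        return value
    return None  # explicit return on the path that previously ended implicitly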
4 changes: 2 additions & 2 deletions disdrodb/l0/l0_processing.py
@@ -567,7 +567,7 @@ def run_l0b(

# Skip run_l0b processing if the raw data are netCDFs
if attrs["raw_data_format"] == "netcdf":
-        return None
+        return

# -----------------------------------------------------------------.
# Start L0B processing
@@ -635,7 +635,7 @@ def run_l0b(
timedelta_str = str(datetime.timedelta(seconds=time.time() - t_i))
msg = f"L0B processing of station_name {station_name} completed in {timedelta_str}"
log_info(logger=logger, msg=msg, verbose=verbose)
-    return None
+    return


def run_l0b_from_nc(
4 changes: 2 additions & 2 deletions disdrodb/l0/l0a_processing.py
@@ -131,7 +131,7 @@ def read_raw_file(
def _check_df_sanitizer_fun(df_sanitizer_fun):
"""Check that ``df`` is the only argument of ``df_sanitizer_fun``."""
if df_sanitizer_fun is None:
-        return None
+        return
if not callable(df_sanitizer_fun):
raise ValueError("'df_sanitizer_fun' must be a function.")
if not np.all(np.isin(inspect.getfullargspec(df_sanitizer_fun).args, ["df"])):
@@ -488,7 +488,7 @@ def replace_nan_flags(df, sensor_name, verbose=False):
if n_nan_flags_values > 0:
msg = f"In variable {var}, {n_nan_flags_values} values were nan_flags and were replaced to np.nan."
log_info(logger=logger, msg=msg, verbose=verbose)
-            df[var][is_a_nan_flag] = np.nan
+            df.loc[is_a_nan_flag, var] = np.nan
# Return dataframe
return df

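Besides the bare return, this file also replaces chained indexing (df[var][mask] = ...) with a single .loc assignment, which writes to the original DataFrame rather than to a possible intermediate copy and avoids pandas' SettingWithCopyWarning. A self-contained illustration with made-up data:

# Illustration with made-up data: a labelled .loc assignment writes through to
# the original DataFrame, unlike chained indexing.
import numpy as np
import pandas as pd

df = pd.DataFrame({"raw_drop_number": [0.0, -9999.0, 3.0, -9999.0]})
is_a_nan_flag = df["raw_drop_number"].isin([-9999.0])

# df["raw_drop_number"][is_a_nan_flag] = np.nan   # chained indexing: may modify a copy
df.loc[is_a_nan_flag, "raw_drop_number"] = np.nan  # single indexing operation

print(df)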
2 changes: 1 addition & 1 deletion disdrodb/l0/l0b_nc_processing.py
@@ -127,7 +127,7 @@ def add_dataset_missing_variables(ds, missing_vars, sensor_name):
# Get variable dimension
dims = var_dims_dict[var]
# Retrieve expected shape
-        expected_shape = [ds.dims[dim] for dim in dims]
+        expected_shape = [ds.sizes[dim] for dim in dims]
# Create DataArray
arr = np.zeros(expected_shape) * np.nan
da = xr.DataArray(arr, dims=dims)
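The ds.dims[dim] to ds.sizes[dim] change follows xarray's deprecation path: newer releases steer Dataset.dims toward a set-like collection of dimension names, so sizes is the recommended name-to-length mapping on both Dataset and DataArray. A minimal sketch with dummy dimension names, not the project's sensor layout:

# Minimal sketch with dummy dimensions (not the project's sensor layout).
import numpy as np
import xarray as xr

ds = xr.Dataset({"dummy_var": (("time", "bin"), np.zeros((4, 32)))})

dims = ("time", "bin")
expected_shape = [ds.sizes[dim] for dim in dims]  # [4, 32]

da = xr.DataArray(np.full(expected_shape, np.nan), dims=dims)
print(da.shape)  # (4, 32)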
12 changes: 4 additions & 8 deletions disdrodb/l0/template_tools.py
@@ -329,8 +329,7 @@ def str_has_decimal_digits(string: str) -> bool:
"""
if len(string.split(".")) == 2:
return True
-    else:
-        return False
+    return False


def get_decimal_ndigits(string: str) -> int:
@@ -348,8 +347,7 @@ def get_decimal_ndigits(string: str) -> int:
"""
if str_has_decimal_digits(string):
return len(string.split(".")[1])
-    else:
-        return 0
+    return 0


def get_natural_ndigits(string: str) -> int:
@@ -369,8 +367,7 @@ def get_natural_ndigits(string: str) -> int:
return len(string.replace("-", ""))
if str_has_decimal_digits(string):
return len(string.split(".")[0].replace("-", ""))
-    else:
-        return 0
+    return 0


def get_ndigits(string: str) -> int:
@@ -392,8 +389,7 @@ def get_ndigits(string: str) -> int:
string = string.replace("-", "")
if str_has_decimal_digits(string):
return len(string) - 1 # remove .
-    else:
-        return len(string)
+    return len(string)


def get_nchar(string: str) -> int:
9 changes: 3 additions & 6 deletions disdrodb/metadata/checks.py
@@ -169,12 +169,10 @@ def _check_lonlat_type(longitude, latitude):
def _check_lonlat_validity(longitude, latitude):
if longitude == -9999 or latitude == -9999:
raise ValueError("Missing lat lon coordinates (-9999).")
-    elif longitude > 180 or longitude < -180:
+    if longitude > 180 or longitude < -180:
        raise ValueError("Invalid longitude (outside [-180, 180])")
-    elif latitude > 90 or latitude < -90:
+    if latitude > 90 or latitude < -90:
        raise ValueError("Invalid latitude (outside [-90, 90])")
-    else:
-        pass


def check_metadata_geolocation(metadata) -> None:
@@ -550,8 +548,7 @@ def check_archive_metadata_compliance(base_dir: Optional[str] = None, raise_erro
msg = msg + f"The error is: {e}."
if raise_error:
raise ValueError(msg)
-        else:
-            print(msg)
+        print(msg)

return is_valid

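The first hunk flattens an elif/else ladder that follows a raise into independent if checks (the RET506 superfluous-else-after-raise pattern) and drops the dead else: pass. Assembled from the hunk above, the resulting validation reads as a sequence of guard clauses:

# Resulting shape, assembled from the hunk above.


def _check_lonlat_validity(longitude, latitude):
    if longitude == -9999 or latitude == -9999:
        raise ValueError("Missing lat lon coordinates (-9999).")
    if longitude > 180 or longitude < -180:
        raise ValueError("Invalid longitude (outside [-180, 180])")
    if latitude > 90 or latitude < -90:
        raise ValueError("Invalid latitude (outside [-90, 90])")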
18 changes: 6 additions & 12 deletions disdrodb/tests/test_l0/test_config_files.py
@@ -90,10 +90,8 @@ def is_sorted_int_keys(obj: list) -> bool:
if isinstance(obj, list):
if len(obj) == 0:
return True
-        else:
-            return all(isinstance(x, int) for x in obj) and obj == sorted(obj)
-    else:
-        return False
+        return all(isinstance(x, int) for x in obj) and obj == sorted(obj)
+    return False


def is_numeric_list(obj: list) -> bool:
@@ -112,10 +110,8 @@ def is_numeric_list(obj: list) -> bool:
if isinstance(obj, list):
if len(obj) == 0:
return True
-        else:
-            return all(isinstance(x, (int, float)) for x in obj)
-    else:
-        return False
+        return all(isinstance(x, (int, float)) for x in obj)
+    return False


def is_string_list(obj: list) -> bool:
@@ -134,10 +130,8 @@ def is_string_list(obj: list) -> bool:
if isinstance(obj, list):
if len(obj) == 0:
return True
-        else:
-            return all(isinstance(x, str) for x in obj)
-    else:
-        return False
+        return all(isinstance(x, str) for x in obj)
+    return False


def validate_schema_pytest(schema_to_validate: Union[str, list], schema: BaseModel) -> bool:
7 changes: 4 additions & 3 deletions disdrodb/tests/test_l0/test_l0a_processing.py
@@ -597,10 +597,10 @@ def test_read_raw_files():

# Create a test dataframe
df1 = pd.DataFrame(
{"time": pd.date_range(start="2022-01-01", end="2022-01-02", freq="H"), "value": np.random.rand(25)},
{"time": pd.date_range(start="2022-01-01", end="2022-01-02", freq="h"), "value": np.random.rand(25)},
)
df2 = pd.DataFrame(
{"time": pd.date_range(start="2022-01-03", end="2022-01-04", freq="H"), "value": np.random.rand(25)},
{"time": pd.date_range(start="2022-01-03", end="2022-01-04", freq="h"), "value": np.random.rand(25)},
)
df_list = [df1, df2]

@@ -620,8 +620,9 @@ def mock_process_raw_file(filepath, column_names, reader_kwargs, df_sanitizer_fun, sensor_name, verbose):
def mock_process_raw_file(filepath, column_names, reader_kwargs, df_sanitizer_fun, sensor_name, verbose):
if filepath == "test_file1.csv":
return df1
-        elif filepath == "test_file2.csv":
+        if filepath == "test_file2.csv":
return df2
+        return None

# Monkey patch the function
l0a_processing.process_raw_file = mock_process_raw_file
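The freq="H" to freq="h" swaps in this test (and the next one) track pandas, which deprecates the uppercase hourly alias in recent releases in favour of the lowercase spelling. A quick self-contained check:

# Self-contained check: lowercase "h" is the current hourly alias in pandas.
import numpy as np
import pandas as pd

df1 = pd.DataFrame(
    {"time": pd.date_range(start="2022-01-01", end="2022-01-02", freq="h"), "value": np.random.rand(25)},
)
print(len(df1))  # 25 hourly timestamps, both endpoints included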
2 changes: 1 addition & 1 deletion disdrodb/tests/test_l0/test_l0b_processing.py
@@ -97,7 +97,7 @@ def define_test_dummy_configs():
def test_create_l0b_from_l0a(create_test_config_files):
# Create a sample DataFrame
df = pd.DataFrame({
"time": pd.date_range("2022-01-01", periods=10, freq="H"),
"time": pd.date_range("2022-01-01", periods=10, freq="h"),
"raw_drop_concentration": np.random.rand(10),
"raw_drop_average_velocity": np.random.rand(10),
"raw_drop_number": np.random.rand(10),
3 changes: 1 addition & 2 deletions disdrodb/utils/compression.py
@@ -169,8 +169,7 @@ def _compress_file(filepath: str, method: str, skip: bool) -> str:
if skip:
print(f"File {filepath} is already compressed. Skipping.")
return filepath
-        else:
-            raise ValueError(f"File {filepath} is already compressed !")
+        raise ValueError(f"File {filepath} is already compressed !")

extension = COMPRESSION_OPTIONS[method]
archive_name = os.path.basename(filepath) + extension
8 changes: 3 additions & 5 deletions disdrodb/utils/directories.py
@@ -51,8 +51,7 @@ def list_paths(dir_path, glob_pattern, recursive=False):
"""Return a list of filepaths and directory paths."""
if not recursive:
return glob.glob(os.path.join(dir_path, glob_pattern))
-    else:
-        return _recursive_glob(dir_path, glob_pattern)
+    return _recursive_glob(dir_path, glob_pattern)


def list_files(dir_path, glob_pattern, recursive=False):
@@ -125,8 +124,7 @@ def is_empty_directory(path):
paths = os.listdir(path)
if len(paths) == 0:
return True
-    else:
-        return False
+    return False


def _remove_file_or_directories(path):
@@ -155,7 +153,7 @@ def remove_if_exists(path: str, force: bool = False) -> None:
"""
# If the path does not exist, do nothing
if not os.path.exists(path):
-        return None
+        return

# If the path exists and force=False, raise Error
if not force:
2 changes: 1 addition & 1 deletion disdrodb/utils/logger.py
@@ -200,7 +200,7 @@ def define_summary_log(list_logs):
"""
# LogCaptureHandler of pytest does not have baseFilename attribute, so it returns None
if list_logs[0] is None:
-        return None
+        return

station_name, logs_dir = _get_logs_dir(list_logs)

3 changes: 2 additions & 1 deletion pyproject.toml
@@ -153,7 +153,7 @@ select = [
# flake8-bugbear
# "B",
# flake8-return
# "RET",
"RET",
# flake8-unused-arguments
# "ARG",
# flake8-raise
@@ -192,6 +192,7 @@ ignore = [
"PERF203",
"PLW2901",
"B904",
"RET504",

# Docstyle Rules
"D404", # Docstring can't start with "This"
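Enabling "RET" in the ruff select list is what drives every refactor above, while "RET504" (unnecessary assignment before return) is added to ignore so that a named intermediate result kept for readability is still allowed. An illustrative sketch of the pattern the ignore entry permits, not taken from the repository:

# Illustrative sketch (hypothetical function): RET504 would flag this assignment,
# but the rule is ignored in pyproject.toml, so the named result stays allowed.


def _example_decimal_ndigits(string: str) -> int:
    ndigits = len(string.split(".")[1]) if "." in string else 0  # named for readability
    return ndigits  # RET504 would otherwise suggest returning the expression directly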
