lint: freeze & run Black version 24.02 #3131

Merged 13 commits on Apr 5, 2024

Changes from all commits:
22 changes: 8 additions & 14 deletions .github/workflows/style_type_checks.yml
@@ -9,22 +9,16 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: extractions/setup-just@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Python 3.8
uses: actions/setup-python@v4
with:
python-version: '3.8'
- uses: actions/setup-python@v4
- name: Install dependencies
run: |
pip install .
pip install click black mypy
pip install types-python-dateutil
pip install types-waitress
pip install types-PyYAML
- name: Style and type checks
run: |
just black
just mypy
# todo: install also `black[jupyter]`
pip install click "black==24.02" "mypy==1.8.0" \
types-python-dateutil types-waitress types-PyYAML
- name: Style check
run: just black
- name: Type check
run: just mypy
- name: Check license headers
run: just license
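
A note on the pin above: "24.02" is not a literal Black release tag. PEP 440 normalizes leading zeros in release segments, so pip resolves `black==24.02` to the 24.2 release line (24.2.0). A minimal sketch (not part of this PR) verifying this with the `packaging` library:

```python
# A minimal sketch (not part of this PR): PEP 440 drops leading zeros in
# release segments and zero-pads for comparison, so "24.02", "24.2" and
# "24.2.0" all denote the same version.
from packaging.version import Version

assert Version("24.02") == Version("24.2") == Version("24.2.0")
print(Version("24.02"))  # -> 24.2 (the normalized form)
```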
2 changes: 1 addition & 1 deletion Justfile
@@ -34,7 +34,7 @@ release:
python setup.py sdist

black:
black --check src test examples
black --check --color src test examples

mypy:
python setup.py type_check
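
For running the same check outside `just`, the recipe above amounts to Black in check mode with colored output over the three source trees. A minimal sketch (not part of this PR) invoking it via `subprocess`:

```python
# A minimal sketch (not part of this PR): the command `just black` now
# runs, invoked from Python. --check reports offending files without
# rewriting them; --color forces colored output in CI logs.
import subprocess
import sys

result = subprocess.run(
    ["black", "--check", "--color", "src", "test", "examples"]
)
sys.exit(result.returncode)  # non-zero when any file would be reformatted
```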
1 change: 1 addition & 0 deletions examples/anomaly_detection.py
@@ -14,6 +14,7 @@
"""
This example shows how to do anomaly detection with DeepAR.
The model is first trained and then time-points with the largest negative log-likelihood are plotted.

"""
import numpy as np
from itertools import islice
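
The docstring above summarizes the example's approach: train a model, then flag the time points with the largest negative log-likelihood. A minimal, framework-free sketch (not part of this PR) of that scoring idea; fitting a Gaussian to forecast samples is an assumption for illustration, not what examples/anomaly_detection.py does exactly:

```python
# A minimal sketch (not part of this PR): score each time step by its
# negative log-likelihood under a Gaussian fitted to forecast samples,
# then flag the worst-scoring points as anomalies.
import numpy as np

rng = np.random.default_rng(0)
samples = rng.normal(loc=10.0, scale=2.0, size=(100, 48))  # (num_samples, T)
actual = rng.normal(loc=10.0, scale=2.0, size=48)
actual[[7, 23]] += 12.0  # inject two anomalies

mu, sigma = samples.mean(axis=0), samples.std(axis=0) + 1e-8
nll = 0.5 * ((actual - mu) / sigma) ** 2 + np.log(sigma * np.sqrt(2 * np.pi))
print("most anomalous time steps:", np.argsort(-nll)[:2])  # likely [7, 23]
```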
1 change: 1 addition & 0 deletions examples/warm_start.py
@@ -13,6 +13,7 @@

"""
This example shows how to initialize the network with parameters from a model that was previously trained.

"""

from gluonts.dataset.repository import get_dataset, dataset_recipes
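
A minimal sketch (not part of this PR) of the warm-start idea described in the docstring, shown in plain PyTorch as a stand-in for the GluonTS networks the example actually uses:

```python
# A minimal sketch (not part of this PR): initialize a fresh network
# from a previously trained one's parameters, then continue training.
import torch

trained = torch.nn.Linear(16, 1)  # stands in for the trained network
fresh = torch.nn.Linear(16, 1)    # same architecture, new training run
fresh.load_state_dict(trained.state_dict())  # copy learned parameters
```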
1 change: 1 addition & 0 deletions pyproject.toml
@@ -1,4 +1,5 @@
[tool.black]
target-version = ['py38']
line-length = 79

[tool.pytest.ini_options]
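
The new `target-version = ['py38']` setting explains most of the `**kwargs,` churn in the files below: Black treats a trailing comma after `*args`/`**kwargs` as Python 3.6+-only syntax, so once 3.8 is declared as the floor it adds the magic trailing comma in exploded signatures. A minimal sketch (not part of this PR), assuming Black >= 24 is installed; the signature is a made-up stand-in echoing the `fit(...)` hunk below:

```python
# A minimal sketch (not part of this PR) of the settings above, via
# Black's Python API. With py38 declared, the exploded signature gets a
# trailing comma after **kwargs; without target versions it typically
# would not, since Black cannot assume 3.6+ syntax is safe.
import black

mode = black.Mode(
    target_versions={black.TargetVersion.PY38},
    line_length=79,
)

src = (
    "def fit(self, x, model_is_already_trained: bool = False,"
    " some_other_long_parameter=None, **kwargs): pass"
)
print(black.format_str(src, mode=mode))
# def fit(
#     self,
#     x,
#     model_is_already_trained: bool = False,
#     some_other_long_parameter=None,
#     **kwargs,
# ):
#     pass
```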
2 changes: 1 addition & 1 deletion src/gluonts/ext/rotbaum/_model.py
@@ -142,7 +142,7 @@ def fit(
# XGBoost, but True if one uses lightgbm.
model_is_already_trained: bool = False, # True if there is no need to
# train self.model
**kwargs
**kwargs,
):
"""
Fits self.model and partitions R^n into cells.
6 changes: 3 additions & 3 deletions src/gluonts/ext/rotbaum/_preprocess.py
@@ -49,7 +49,7 @@ def __init__(
max_n_datapts: int = 400000,
seed: Optional[int] = None,
num_samples: Optional[int] = None,
**kwargs
**kwargs,
):
"""
Parameters
@@ -296,7 +296,7 @@ def __init__(
one_hot_encode: bool = True,
subtract_mean: bool = True,
count_nans: bool = False,
**kwargs
**kwargs,
):
if one_hot_encode:
assert cardinality != "ignore" or (
@@ -313,7 +313,7 @@ def __init__(
stratify_targets=stratify_targets,
n_ignore_last=n_ignore_last,
num_samples=num_samples,
**kwargs
**kwargs,
)

self.use_feat_static_real = use_feat_static_real
6 changes: 3 additions & 3 deletions src/gluonts/model/evaluation.py
@@ -104,7 +104,7 @@ def evaluate_forecasts_raw(
batch_size: int = 100,
mask_invalid_label: bool = True,
allow_nan_forecast: bool = False,
seasonality: Optional[int] = None
seasonality: Optional[int] = None,
) -> dict:
"""
Evaluate ``forecasts`` by comparing them with ``test_data``, according
@@ -189,7 +189,7 @@ def evaluate_forecasts(
batch_size: int = 100,
mask_invalid_label: bool = True,
allow_nan_forecast: bool = False,
seasonality: Optional[int] = None
seasonality: Optional[int] = None,
) -> pd.DataFrame:
"""
Evaluate ``forecasts`` by comparing them with ``test_data``, according
@@ -243,7 +243,7 @@ def evaluate_model(
batch_size: int = 100,
mask_invalid_label: bool = True,
allow_nan_forecast: bool = False,
seasonality: Optional[int] = None
seasonality: Optional[int] = None,
) -> pd.DataFrame:
"""
Evaluate ``model`` when applied to ``test_data``, according
8 changes: 4 additions & 4 deletions src/gluonts/model/forecast_generator.py
@@ -94,7 +94,7 @@ def __call__(
input_names: List[str],
output_transform: Optional[OutputTransform],
num_samples: Optional[int],
**kwargs
**kwargs,
) -> Iterator[Forecast]:
raise NotImplementedError()

@@ -111,7 +111,7 @@ def __call__(
input_names: List[str],
output_transform: Optional[OutputTransform],
num_samples: Optional[int],
**kwargs
**kwargs,
) -> Iterator[Forecast]:
for batch in inference_data_loader:
inputs = select(input_names, batch, ignore_missing=True)
@@ -155,7 +155,7 @@ def __call__(
input_names: List[str],
output_transform: Optional[OutputTransform],
num_samples: Optional[int],
**kwargs
**kwargs,
) -> Iterator[Forecast]:
for batch in inference_data_loader:
inputs = select(input_names, batch, ignore_missing=True)
@@ -205,7 +205,7 @@ def __call__(
input_names: List[str],
output_transform: Optional[OutputTransform],
num_samples: Optional[int],
**kwargs
**kwargs,
) -> Iterator[Forecast]:
for batch in inference_data_loader:
inputs = select(input_names, batch, ignore_missing=True)
4 changes: 2 additions & 2 deletions src/gluonts/mx/block/regularization.py
@@ -49,7 +49,7 @@ def __init__(
weight: Optional[float] = None,
batch_axis: int = 1,
time_axis: int = 0,
**kwargs
**kwargs,
):
super().__init__(weight, batch_axis, **kwargs)
self._alpha = alpha
@@ -121,7 +121,7 @@ def __init__(
weight: Optional[float] = None,
batch_axis: int = 1,
time_axis: int = 0,
**kwargs
**kwargs,
):
super().__init__(weight, batch_axis, **kwargs)
self._beta = beta
2 changes: 1 addition & 1 deletion src/gluonts/mx/block/scaler.py
@@ -121,7 +121,7 @@ def __init__(
minimum_scale: float = 1e-10,
default_scale: Optional[float] = None,
*args,
**kwargs
**kwargs,
):
super().__init__(*args, **kwargs)
self.minimum_scale = minimum_scale
2 changes: 1 addition & 1 deletion src/gluonts/mx/block/sndense.py
@@ -51,7 +51,7 @@ def __init__(
dtype="float32",
num_power_iter: int = 1,
ctx: Optional[mx.Context] = None,
**kwargs
**kwargs,
):
super().__init__(**kwargs)
self._coeff = coeff
2 changes: 1 addition & 1 deletion src/gluonts/mx/distribution/iresnet.py
@@ -65,7 +65,7 @@ def __init__(
coeff: float = 0.9,
use_caching: bool = True,
*args,
**kwargs
**kwargs,
):
super().__init__(*args, **kwargs)
assert len(event_shape) == 1
2 changes: 1 addition & 1 deletion src/gluonts/mx/distribution/lds.py
@@ -65,7 +65,7 @@ def _safe_split(x, num_outputs, axis, squeeze_axis, *args, **kwargs):
num_outputs=num_outputs,
squeeze_axis=squeeze_axis,
*args,
**kwargs
**kwargs,
)
return [x.squeeze(axis=axis)] if squeeze_axis else [x]

8 changes: 4 additions & 4 deletions src/gluonts/nursery/SCott/dataset_tools/algo_clustering.py
@@ -232,14 +232,14 @@ def KMeans_m5_dataset(
{
"target": ts_slice,
"start": unsplit_start,
} # , 'feat_static_cat': train_entry['feat_static_cat']}
)
}
) # , 'feat_static_cat': train_entry['feat_static_cat']}
whole_data.append(
{
"target": ts_slice,
"start": unsplit_start,
} # , 'feat_static_cat': train_entry['feat_static_cat']}
)
}
) # , 'feat_static_cat': train_entry['feat_static_cat']}
sample_id += 1
print(len(whole_data))
ret["group_ratio"] = [len(i) / len(whole_data) for i in dataset_group]
2 changes: 1 addition & 1 deletion src/gluonts/nursery/daf/tslib/nn/attention/selfattn.py
@@ -309,7 +309,7 @@ def forward(
value: Tensor,
shape: Tensor,
*,
mask: Optional[BoolTensor] = None
mask: Optional[BoolTensor] = None,
) -> Tensor:
q, k, v = self._compute_qkv(value, shape)
score = self._compute_attn_score(q, k, mask)
@@ -312,7 +312,7 @@ def log_prob(self, x, cond, *args, **kwargs):
cond.reshape(B * T, 1, -1),
time,
*args,
**kwargs
**kwargs,
)

return loss
@@ -23,7 +23,7 @@ def __init__(
num_layers: int,
adj_matrix: Tensor,
use_mlp: bool = True,
**kwargs
**kwargs,
):
super().__init__(**kwargs)

@@ -100,7 +100,7 @@ def main(
surrogate[surrogate["name"]]
if surrogate["name"] in surrogate
else {}
)
),
)

# Then, we can create the recommender
@@ -123,7 +123,7 @@ def main(
surrogate[surrogate["name"]]
if surrogate["name"] in surrogate
else {}
)
),
)
elif recommender == "optimal":
recommender_args["tracker"] = tracker
@@ -92,7 +92,7 @@ def main(
input_flags=inputs,
output_normalization=outputs["normalization"],
impute_simulatable=outputs["imputation"],
**(_config[surrogate] if surrogate in _config else {})
**(_config[surrogate] if surrogate in _config else {}),
)

# And evaluate it
2 changes: 1 addition & 1 deletion src/gluonts/torch/model/i_transformer/estimator.py
@@ -232,7 +232,7 @@ def create_training_data_loader(
data: Dataset,
module: ITransformerLightningModule,
shuffle_buffer_length: Optional[int] = None,
**kwargs
**kwargs,
) -> Iterable:
data = Cyclic(data).stream()
instances = self._create_instance_splitter(module, "training").apply(
2 changes: 1 addition & 1 deletion src/gluonts/torch/model/lag_tst/estimator.py
@@ -225,7 +225,7 @@ def create_training_data_loader(
data: Dataset,
module: LagTSTLightningModule,
shuffle_buffer_length: Optional[int] = None,
**kwargs
**kwargs,
) -> Iterable:
data = Cyclic(data).stream()
instances = self._create_instance_splitter(module, "training").apply(
2 changes: 1 addition & 1 deletion src/gluonts/torch/model/patch_tst/estimator.py
@@ -229,7 +229,7 @@ def create_training_data_loader(
data: Dataset,
module: PatchTSTLightningModule,
shuffle_buffer_length: Optional[int] = None,
**kwargs
**kwargs,
) -> Iterable:
data = Cyclic(data).stream()
instances = self._create_instance_splitter(module, "training").apply(
2 changes: 1 addition & 1 deletion src/gluonts/torch/model/wavenet/estimator.py
@@ -348,7 +348,7 @@ def create_training_data_loader(
data: Dataset,
module: WaveNetLightningModule,
shuffle_buffer_length: Optional[int] = None,
**kwargs
**kwargs,
) -> Iterable:
data = Cyclic(data).stream()
instances = self._create_instance_splitter("training").apply(
2 changes: 1 addition & 1 deletion test/nursery/sagemaker_sdk/test_entry_point_scripts.py
@@ -91,7 +91,7 @@ def test_train_script(dataset_name, custom_dataset):
estimator = estimator_cls.from_hyperparameters(
prediction_length=dataset.metadata.prediction_length,
freq=dataset.metadata.freq,
**hyperparameters
**hyperparameters,
)
serialized = serde.dump_json(estimator)
with open(temp_dir_path / "estimator.json", "w") as estimator_file: