Skip to content

Commit

Permalink
Enable dependabot version updates for python code (#1086)
Browse files Browse the repository at this point in the history
Resolves #814 - Dependabot version updates for python + Initial batch update
  • Loading branch information
rashidakanchwala committed Sep 23, 2022
1 parent 388a1af commit edf45a4
Show file tree
Hide file tree
Showing 16 changed files with 73 additions and 70 deletions.
21 changes: 11 additions & 10 deletions .circleci/continue_config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ orbs:

# No windows executor is listed here since windows builds use win/default and modify
# the Python version through the conda environment.
# Python 3.10 is pinned to 3.10.6 to work with current version of mypy
executors:
docker:
parameters:
Expand Down Expand Up @@ -307,11 +308,11 @@ workflows:
- e2e_tests:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
- win_e2e_tests:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
filters:
branches:
only:
Expand All @@ -320,11 +321,11 @@ workflows:
- unit_tests:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
- win_unit_tests:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
filters:
branches:
only:
Expand All @@ -333,7 +334,7 @@ workflows:
- lint:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
- all_circleci_checks_succeeded:
requires:
- e2e_tests
Expand All @@ -359,21 +360,21 @@ workflows:
- e2e_tests:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
- win_e2e_tests:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
- unit_tests:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
- win_unit_tests:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
- lint:
matrix:
parameters:
python_version: ['3.7', '3.8', '3.9', '3.10']
python_version: ['3.7', '3.8', '3.9', '3.10.6']
- javascript_lint_and_tests
6 changes: 2 additions & 4 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,9 @@ updates:
- package-ecosystem: "pip" # See documentation for possible values
directory: "/package" # Location of package manifests
schedule:
interval: "daily"
target-branch: "dependency-update"
interval: "weekly"
ignore:
- dependency-name: "types-*"
labels:
- "python"
- "Python"
- "dependencies"
open-pull-requests-limit: 50
2 changes: 1 addition & 1 deletion package/kedro_viz/api/apps.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,6 @@ async def index():

@app.get("/api/main", response_class=JSONResponse)
async def main():
return json.loads(Path(filepath).read_text())
return json.loads(Path(filepath).read_text(encoding="utf8"))

return app
2 changes: 1 addition & 1 deletion package/kedro_viz/api/graphql/schema.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
"""`kedro_viz.api.graphql.schema` defines the GraphQL schema: queries, mutations
and subscriptions."""
# pylint: disable=no-self-use,missing-function-docstring,missing-class-docstring
# pylint: disable=missing-function-docstring,missing-class-docstring

from __future__ import annotations

Expand Down
2 changes: 1 addition & 1 deletion package/kedro_viz/api/rest/router.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ async def main():

@router.get(
"/nodes/{node_id}",
response_model=NodeMetadataAPIResponse, # type: ignore
response_model=NodeMetadataAPIResponse,
response_model_exclude_none=True,
)
async def get_single_node_metadata(node_id: str):
Expand Down
1 change: 0 additions & 1 deletion package/kedro_viz/integrations/kedro/sqlite_store.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
"""kedro_viz.integrations.kedro.sqlite_store is a child of BaseSessionStore
which stores sessions data in the SQLite database"""
# pylint: disable=too-many-ancestors

import json
import logging
Expand Down
4 changes: 3 additions & 1 deletion package/kedro_viz/integrations/kedro/telemetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ def get_heap_app_id(project_path: Path) -> Optional[str]:
telemetry_file_path = project_path / ".telemetry"
if not telemetry_file_path.exists():
return None
with open(telemetry_file_path) as telemetry_file:
    with open(
        telemetry_file_path, encoding="utf8"
    ) as telemetry_file:  # pylint: disable=unspecified-encoding
telemetry = yaml.safe_load(telemetry_file)
if _is_valid_syntax(telemetry) and telemetry["consent"]:
return _get_heap_app_id()
Expand Down
4 changes: 2 additions & 2 deletions package/kedro_viz/launchers/jupyter.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@

import IPython
import requests
from IPython.core.display import HTML, display
from IPython.display import HTML, display

from kedro_viz.server import DEFAULT_HOST, DEFAULT_PORT, run_server

Expand Down Expand Up @@ -73,7 +73,7 @@ def _wait_for(
def _check_viz_up(host: str, port: int): # pragma: no cover
url = f"http://{host}:{port}"
try:
response = requests.get(url)
response = requests.get(url, timeout=10)
except requests.ConnectionError:
return False

Expand Down
26 changes: 14 additions & 12 deletions package/kedro_viz/models/flowchart.py
Original file line number Diff line number Diff line change
Expand Up @@ -380,17 +380,21 @@ class TaskNodeMetadata(GraphNodeMetadata):
"""Represent the metadata of a TaskNode"""

# the source code of the node's function
code: str = field(init=False)
code: Optional[str] = field(init=False)

# path to the file where the node is defined
filepath: str = field(init=False)
filepath: Optional[str] = field(init=False)

# parameters of the node, if available
parameters: Dict = field(init=False)
parameters: Optional[Dict] = field(init=False, default=None)

# command to run the pipeline to this node
run_command: Optional[str] = field(init=False, default=None)

inputs: List[str] = field(init=False)

outputs: List[str] = field(init=False)

# the task node to which this metadata belongs
task_node: InitVar[TaskNode]

Expand Down Expand Up @@ -468,11 +472,9 @@ def is_plot_node(self):
Currently it only recognises one underlying dataset as a plot node.
In the future, we might want to make this generic.
"""
return (
self.dataset_type
== "kedro.extras.datasets.plotly.plotly_dataset.PlotlyDataSet"
or self.dataset_type
== "kedro.extras.datasets.plotly.json_dataset.JSONDataSet"
return self.dataset_type in (
"kedro.extras.datasets.plotly.plotly_dataset.PlotlyDataSet",
"kedro.extras.datasets.plotly.json_dataset.JSONDataSet",
)

def is_image_node(self):
Expand Down Expand Up @@ -558,13 +560,13 @@ class DataNodeMetadata(GraphNodeMetadata):

# the optional plot data if the underlying dataset has a plot.
# currently only applicable for PlotlyDataSet
plot: Optional[Dict] = field(init=False)
plot: Optional[Dict] = field(init=False, default=None)

# the optional image data if the underlying dataset has a image.
# currently only applicable for matplotlib.MatplotlibWriter
image: Optional[str] = field(init=False)
image: Optional[str] = field(init=False, default=None)

tracking_data: Optional[Dict] = field(init=False)
tracking_data: Optional[Dict] = field(init=False, default=None)

# command to run the pipeline to this data node
run_command: Optional[str] = field(init=False, default=None)
Expand Down Expand Up @@ -641,7 +643,7 @@ def load_versioned_tracking_data(
continue
else:
path = version / Path(filepath).name
with open(path) as fs_file:
with open(path, encoding="utf8") as fs_file:
versions[run_id] = json_stdlib.load(fs_file)
return versions

Expand Down
6 changes: 4 additions & 2 deletions package/kedro_viz/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,11 +90,13 @@ def run_server(
if save_file:
response = get_default_response()
try:
Path(save_file).write_text(response.json(indent=4, sort_keys=True))
Path(save_file).write_text(
response.json(indent=4, sort_keys=True), encoding="utf8"
)
except TypeError: # pragma: no cover
# Keys of incomparable types (e.g. string and int) cannot be sorted.
Path(save_file).write_text(
response.json(indent=4, sort_keys=False)
response.json(indent=4, sort_keys=False), encoding="utf8"
) # pragma: no cover
app = apps.create_api_app_from_project(path, autoreload)
else:
Expand Down
14 changes: 7 additions & 7 deletions package/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
semver~=2.10 # Needs to be at least 2.10.0 to get VersionInfo.match
semver~=2.13 # Needs to be at least 2.10.0 to get VersionInfo.match
kedro>=0.17.0
ipython>=7.0.0, <8.0
fastapi>=0.63.0, <0.67.0
aiofiles==0.6.0
uvicorn[standard]~=0.17.0
watchgod~=0.8.0
ipython>=7.0.0, <9.0
fastapi>=0.73.0, <0.86.0
aiofiles==22.1.0
uvicorn[standard]~=0.18.3
watchgod~=0.8.2
plotly>=4.0
pandas>=0.24
sqlalchemy~=1.2
sqlalchemy~=1.4
strawberry-graphql>=0.99.0, <1.0
networkx>=1.0
28 changes: 14 additions & 14 deletions package/test_requirements.txt
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
-r requirements.txt
kedro[pandas.ParquetDataSet]>=0.17.0
kedro-telemetry>=0.1.1 # for testing telemetry integration
bandit~=1.6.2
behave>=1.2.6, <2.0
black~=22.0
flake8~=3.9.2
isort~=5.8.0
matplotlib~=3.3.4
mypy~=0.930
psutil==5.8.0 # same as Kedro for now
pylint~=2.8.2
pytest~=6.2.0
pytest-asyncio~=0.17.2
pytest-cov~=2.11.1
pytest-mock~=3.6.1
bandit~=1.7
behave~=1.2
black~=22.8
flake8~=5.0
isort~=5.10
matplotlib~=3.5
mypy~=0.971
psutil==5.9 # same as Kedro for now
pylint~=2.15
pytest~=7.1
pytest-asyncio~=0.19
pytest-cov~=3.0
pytest-mock~=3.8
sqlalchemy-stubs~=0.4
trufflehog~=2.1.0
trufflehog~=2.2

# mypy
types-aiofiles==0.1.3
Expand Down
2 changes: 1 addition & 1 deletion package/tests/test_integrations/test_sqlite_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def db_session_class(store_path):
class TestSQLiteStore:
def test_empty(self, store_path):
sqlite_store = SQLiteStore(store_path, next(session_id()))
assert sqlite_store == {}
assert not sqlite_store
assert sqlite_store.location == store_path / "session_store.db"

def test_save_single_run(self, store_path, db_session_class):
Expand Down
21 changes: 10 additions & 11 deletions package/tests/test_models/test_flowchart.py
Original file line number Diff line number Diff line change
Expand Up @@ -418,7 +418,7 @@ def test_plotly_data_node_dataset_not_exist(self):
plotly_data_node.is_tracking_node.return_value = False
plotly_data_node.kedro_obj.exists.return_value = False
plotly_node_metadata = DataNodeMetadata(data_node=plotly_data_node)
assert not hasattr(plotly_node_metadata, "plot")
assert plotly_node_metadata.plot is None

def test_plotly_json_dataset_node_metadata(self):
mock_plot_data = {
Expand Down Expand Up @@ -458,7 +458,7 @@ def test_image_data_node_dataset_not_exist(self):
image_dataset_node.is_plot_node.return_value = False
image_dataset_node.kedro_obj.exists.return_value = False
image_node_metadata = DataNodeMetadata(data_node=image_dataset_node)
assert not hasattr(image_node_metadata, "image")
assert image_node_metadata.image is None

def test_json_data_node_metadata(self):
mock_json_data = {
Expand All @@ -475,7 +475,7 @@ def test_json_data_node_metadata(self):
json_data_node.kedro_obj.load.return_value = mock_json_data
json_node_metadata = DataNodeMetadata(data_node=json_data_node)
assert json_node_metadata.tracking_data == mock_json_data
assert not hasattr(json_node_metadata, "plot")
assert json_node_metadata.plot is None

def test_metrics_data_node_metadata_dataset_not_exist(self):
metrics_data_node = MagicMock()
Expand All @@ -484,8 +484,7 @@ def test_metrics_data_node_metadata_dataset_not_exist(self):
metrics_data_node.is_metric_node.return_value = True
metrics_data_node.kedro_obj.exists.return_value = False
metrics_node_metadata = DataNodeMetadata(data_node=metrics_data_node)
assert not hasattr(metrics_node_metadata, "metrics")
assert not hasattr(metrics_node_metadata, "plot")
assert metrics_node_metadata.plot is None

def test_data_node_metadata_latest_tracking_data_not_exist(self):
plotly_data_node = MagicMock()
Expand All @@ -494,7 +493,7 @@ def test_data_node_metadata_latest_tracking_data_not_exist(self):
plotly_data_node.is_tracking_node.return_value = False
plotly_data_node.kedro_obj.exists.return_value = False
plotly_node_metadata = DataNodeMetadata(data_node=plotly_data_node)
assert not hasattr(plotly_node_metadata, "plot")
assert plotly_node_metadata.plot is None

@patch("kedro_viz.models.flowchart.DataNodeMetadata.load_versioned_tracking_data")
def test_tracking_data_node_metadata_versioned_dataset(self, patched_data_loader):
Expand All @@ -510,7 +509,7 @@ def test_tracking_data_node_metadata_versioned_dataset(self, patched_data_loader
tracking_data_node.kedro_obj.load.return_value = mock_metrics_data
tracking_data_node_metadata = DataNodeMetadata(data_node=tracking_data_node)
assert tracking_data_node_metadata.tracking_data == mock_metrics_data
assert hasattr(tracking_data_node_metadata, "plot")
assert tracking_data_node_metadata.plot is not None

@patch("kedro_viz.models.flowchart.DataNodeMetadata.load_versioned_tracking_data")
def test_tracking_data_node_metadata_versioned_dataset_not_exist(
Expand All @@ -529,7 +528,7 @@ def test_tracking_data_node_metadata_versioned_dataset_not_exist(
tracking_data_node.kedro_obj.load.return_value = mock_metrics_data
tracking_data_node_metadata = DataNodeMetadata(data_node=tracking_data_node)
assert tracking_data_node_metadata.tracking_data == mock_metrics_data
assert not hasattr(tracking_data_node_metadata, "plot")
assert tracking_data_node_metadata.plot is None

def test_data_node_metadata_create_metrics_plot(self):
test_versioned_data = {
Expand Down Expand Up @@ -566,7 +565,7 @@ def tracking_data_filepath(self, tmp_path):
for index, directory in enumerate(dir_name):
filepath = Path(source_dir / directory / filename)
filepath.parent.mkdir(parents=True, exist_ok=True)
filepath.write_text(json.dumps(json_content[index]))
filepath.write_text(json.dumps(json_content[index]), encoding="utf8")
return source_dir

@pytest.fixture
Expand All @@ -589,7 +588,7 @@ def tracking_data_filepath_reload(self, tmp_path):
for index, directory in enumerate(dir_name):
filepath = Path(source_dir / directory / filename)
filepath.parent.mkdir(parents=True, exist_ok=True)
filepath.write_text(json.dumps(json_content[index]))
filepath.write_text(json.dumps(json_content[index]), encoding="utf8")
return source_dir

@pytest.fixture
Expand All @@ -612,7 +611,7 @@ def tracking_data_filepath_invalid_run_id(self, tmp_path):
for index, directory in enumerate(dir_name):
filepath = Path(source_dir / directory / filename)
filepath.parent.mkdir(parents=True, exist_ok=True)
filepath.write_text(json.dumps(json_content[index]))
filepath.write_text(json.dumps(json_content[index]), encoding="utf8")
return source_dir

def test_load_metrics_versioned_data(self, tracking_data_filepath):
Expand Down
2 changes: 1 addition & 1 deletion package/tests/test_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def test_save_file(self, tmp_path, mocker):
)
save_file = tmp_path / "save.json"
run_server(save_file=save_file)
with open(save_file, "r") as f:
with open(save_file, "r", encoding="utf8") as f:
assert json.load(f) == {"content": "test"}

@pytest.mark.parametrize(
Expand Down
Loading

0 comments on commit edf45a4

Please sign in to comment.