Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enable/disable preview for all the datasets when publishing Kedro-Viz from CLI #1894

Merged
merged 32 commits into from
May 31, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
74e0371
Add options for cli common with disable preview of datasets
jitu5 May 3, 2024
5398a11
New approach for disable preview
jitu5 May 7, 2024
4da365f
Disable preview for cli and lint fix
jitu5 May 7, 2024
9f2afa2
deploy api with preview
jitu5 May 8, 2024
35f1436
Removing UI changes
jitu5 May 8, 2024
85e1a2d
Removing UI changes
jitu5 May 8, 2024
d8ef988
refactor disable preview cli
ravi-kumar-pilla May 15, 2024
3377832
fix lint
ravi-kumar-pilla May 15, 2024
7a31729
fix lint
ravi-kumar-pilla May 15, 2024
028a7f3
Merge branch 'main' of https://github.com/kedro-org/kedro-viz into fe…
ravi-kumar-pilla May 16, 2024
78f44bb
testing 1
ravi-kumar-pilla May 16, 2024
663924c
change help text
ravi-kumar-pilla May 16, 2024
defbf66
change help text
ravi-kumar-pilla May 16, 2024
9195150
update tests
ravi-kumar-pilla May 16, 2024
c4c7375
format help text
ravi-kumar-pilla May 16, 2024
6eaaaeb
change default preview for run
ravi-kumar-pilla May 16, 2024
8372ec3
update release note
ravi-kumar-pilla May 16, 2024
2f67e78
address vale suggestion
ravi-kumar-pilla May 17, 2024
12eabd1
fix pytests and linter
ravi-kumar-pilla May 17, 2024
57b68b3
adjust file permission
ravi-kumar-pilla May 17, 2024
4f5c446
update release note
ravi-kumar-pilla May 17, 2024
c815a9b
--include-preview flag added for CLI
jitu5 May 20, 2024
5a43fda
Merge branch 'main' into feature/disable-preview
jitu5 May 20, 2024
0998f7a
Merge branch 'main' into feature/disable-preview
jitu5 May 21, 2024
d06f36e
are_datasets_previewable renamed to is_all_previews_enabled
jitu5 May 21, 2024
73b1944
lint fix
jitu5 May 21, 2024
94e3501
type ignore add for TRANSCODING_SEPARATOR
jitu5 May 21, 2024
46dbf28
Merge branch 'main' into feature/disable-preview
jitu5 May 22, 2024
20bbe47
Merge branch 'main' into feature/disable-preview
jitu5 May 29, 2024
b80c799
Merge branch 'main' into feature/disable-preview
jitu5 May 30, 2024
788a491
Merge branch 'main' into feature/disable-preview
jitu5 May 31, 2024
57aec4e
Merge branch 'main' into feature/disable-preview
jitu5 May 31, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions RELEASE.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ Please follow the established format:
- Introduce the toggle to expand and collapse all pipelines button in the utility bar. (#1858)
- Allow Kedro-Viz commands to run from any sub directory within Kedro project. (#1871)

- Enable/disable preview for all the datasets when publishing Kedro-Viz from CLI. (#1894)

## Bug fixes and other changes
- Fix broken URL when active pipeline name changes on initial load. (#1914)
- Fix bug related to tag filtering and sharing with stateful URL. (#1878)
Expand Down
11 changes: 8 additions & 3 deletions package/kedro_viz/api/rest/responses.py
Original file line number Diff line number Diff line change
Expand Up @@ -420,8 +420,13 @@ def save_api_main_response_to_fs(main_path: str, remote_fs: Any):
raise exc


def save_api_node_response_to_fs(nodes_path: str, remote_fs: Any):
def save_api_node_response_to_fs(
nodes_path: str, remote_fs: Any, is_all_previews_enabled: bool
):
"""Saves API /nodes/{node} response to a directory."""
# Set if preview is enabled/disabled for all data nodes
DataNodeMetadata.set_is_all_previews_enabled(is_all_previews_enabled)

for nodeId in data_access_manager.nodes.get_node_ids():
try:
write_api_response_to_fs(
Expand Down Expand Up @@ -452,7 +457,7 @@ def save_api_pipeline_response_to_fs(pipelines_path: str, remote_fs: Any):
raise exc


def save_api_responses_to_fs(path: str, remote_fs: Any):
def save_api_responses_to_fs(path: str, remote_fs: Any, is_all_previews_enabled: bool):
"""Saves all Kedro Viz API responses to a directory."""
try:
logger.debug(
Expand All @@ -470,7 +475,7 @@ def save_api_responses_to_fs(path: str, remote_fs: Any):
remote_fs.makedirs(pipelines_path, exist_ok=True)

save_api_main_response_to_fs(main_path, remote_fs)
save_api_node_response_to_fs(nodes_path, remote_fs)
save_api_node_response_to_fs(nodes_path, remote_fs, is_all_previews_enabled)
save_api_pipeline_response_to_fs(pipelines_path, remote_fs)

except Exception as exc: # pragma: no cover
Expand Down
8 changes: 4 additions & 4 deletions package/kedro_viz/integrations/deployment/base_deployer.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,9 @@ def __init__(self):
self._path = None
self._fs = None

def _upload_api_responses(self):
def _upload_api_responses(self, is_all_previews_enabled: bool):
"""Write API responses to the build."""
save_api_responses_to_fs(self._path, self._fs)
save_api_responses_to_fs(self._path, self._fs, is_all_previews_enabled)

def _ingest_heap_analytics(self):
"""Ingest heap analytics to index file in the build."""
Expand Down Expand Up @@ -98,9 +98,9 @@ def _upload_deploy_viz_metadata_file(self):
logger.exception("Upload failed: %s ", exc)
raise exc

def deploy(self):
def deploy(self, is_all_previews_enabled: bool = False):
"""Create and deploy all static files to local/remote file system"""

self._upload_api_responses()
self._upload_api_responses(is_all_previews_enabled)
self._upload_static_files(_HTML_DIR)
self._upload_deploy_viz_metadata_file()
34 changes: 28 additions & 6 deletions package/kedro_viz/launchers/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,12 @@ def run(
is_flag=True,
help="A flag to include all registered hooks in your Kedro Project",
)
def deploy(platform, endpoint, bucket_name, include_hooks):
@click.option(
"--include-preview",
is_flag=True,
help="Enable/disable preview for all the datasets.",
)
def deploy(platform, endpoint, bucket_name, include_hooks, include_preview):
"""Deploy and host Kedro Viz on provided platform"""
if not platform or platform.lower() not in SHAREABLEVIZ_SUPPORTED_PLATFORMS:
display_cli_message(
Expand All @@ -252,7 +257,13 @@ def deploy(platform, endpoint, bucket_name, include_hooks):
)
return

create_shareableviz_process(platform, endpoint, bucket_name, include_hooks)
create_shareableviz_process(
platform,
include_preview,
endpoint,
bucket_name,
include_hooks,
)


@viz.command(context_settings={"help_option_names": ["-h", "--help"]})
Expand All @@ -261,14 +272,23 @@ def deploy(platform, endpoint, bucket_name, include_hooks):
is_flag=True,
help="A flag to include all registered hooks in your Kedro Project",
)
def build(include_hooks):
@click.option(
"--include-preview",
is_flag=True,
help="Enable/disable preview for all the datasets.",
)
def build(include_hooks, include_preview):
"""Create build directory of local Kedro Viz instance with Kedro project data"""

create_shareableviz_process("local", include_hooks=include_hooks)
create_shareableviz_process("local", include_preview, include_hooks=include_hooks)


def create_shareableviz_process(
platform, endpoint=None, bucket_name=None, include_hooks=False
platform,
is_all_previews_enabled,
endpoint=None,
bucket_name=None,
include_hooks=False,
):
"""Creates platform specific deployer process"""
try:
Expand All @@ -279,6 +299,7 @@ def create_shareableviz_process(
target=load_and_deploy_viz,
args=(
platform,
is_all_previews_enabled,
endpoint,
bucket_name,
include_hooks,
Expand Down Expand Up @@ -354,6 +375,7 @@ def create_shareableviz_process(

def load_and_deploy_viz(
platform,
is_all_previews_enabled,
endpoint,
bucket_name,
include_hooks,
Expand All @@ -369,7 +391,7 @@ def load_and_deploy_viz(

# Start the deployment
deployer = DeployerFactory.create_deployer(platform, endpoint, bucket_name)
deployer.deploy()
deployer.deploy(is_all_previews_enabled)

except (
# pylint: disable=catching-non-exception
Expand Down
25 changes: 20 additions & 5 deletions package/kedro_viz/models/flowchart.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from enum import Enum
from pathlib import Path
from types import FunctionType
from typing import Any, Dict, List, Optional, Set, Union, cast
from typing import Any, ClassVar, Dict, List, Optional, Set, Union, cast

from kedro.pipeline.node import Node as KedroNode
from pydantic import (
Expand Down Expand Up @@ -722,12 +722,19 @@ class DataNodeMetadata(GraphNodeMetadata):
Args:
data_node (DataNode): Data node to which this metadata belongs to.

Attributes:
is_all_previews_enabled (bool): Class-level attribute to determine if
previews are enabled for all nodes. This can be configured via CLI
or UI to manage the preview settings.

Raises:
AssertionError: If data_node is not supplied during instantiation
"""

data_node: DataNode = Field(..., exclude=True)

is_all_previews_enabled: ClassVar[bool] = True

type: Optional[str] = Field(
default=None, validate_default=True, description="The type of the data node"
)
Expand Down Expand Up @@ -769,6 +776,10 @@ def check_data_node_exists(cls, values):
cls.set_data_node_and_dataset(values["data_node"])
return values

@classmethod
def set_is_all_previews_enabled(cls, value: bool):
cls.is_all_previews_enabled = value

@classmethod
def set_data_node_and_dataset(cls, data_node):
cls.data_node = data_node
Expand Down Expand Up @@ -799,8 +810,10 @@ def set_run_command(cls, _):
@field_validator("preview")
@classmethod
def set_preview(cls, _):
if not cls.data_node.is_preview_enabled() or not hasattr(
cls.dataset, "preview"
if (
not cls.data_node.is_preview_enabled()
or not hasattr(cls.dataset, "preview")
or not cls.is_all_previews_enabled
):
return None

Expand All @@ -824,8 +837,10 @@ def set_preview(cls, _):
@field_validator("preview_type")
@classmethod
def set_preview_type(cls, _):
if not cls.data_node.is_preview_enabled() or not hasattr(
cls.dataset, "preview"
if (
not cls.data_node.is_preview_enabled()
or not hasattr(cls.dataset, "preview")
or not cls.is_all_previews_enabled
):
return None

Expand Down
5 changes: 3 additions & 2 deletions package/kedro_viz/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,9 +125,10 @@ def run_server(
pipeline_name,
extra_params,
)

# [TODO: As we can do this with `kedro viz build`,
# we need to shift this feature outside of kedro viz run]
if save_file:
save_api_responses_to_fs(save_file, fsspec.filesystem("file"))
save_api_responses_to_fs(save_file, fsspec.filesystem("file"), True)

app = apps.create_api_app_from_project(path, autoreload)
else:
Expand Down
7 changes: 6 additions & 1 deletion package/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from kedro_viz.data_access import DataAccessManager
from kedro_viz.integrations.kedro.hooks import DatasetStatsHook
from kedro_viz.integrations.kedro.sqlite_store import SQLiteStore
from kedro_viz.models.flowchart import GraphNode
from kedro_viz.models.flowchart import DataNodeMetadata, GraphNode
from kedro_viz.server import populate_data


Expand Down Expand Up @@ -350,3 +350,8 @@ def pipeline_with_data_sets_mock():
pipeline = mock.MagicMock()
pipeline.data_sets.return_value = ["model_inputs#csv"]
return pipeline


@pytest.fixture(autouse=True)
def reset_is_all_previews_enabled():
DataNodeMetadata.is_all_previews_enabled = True
22 changes: 14 additions & 8 deletions package/tests/test_api/test_rest/test_responses.py
Original file line number Diff line number Diff line change
Expand Up @@ -979,7 +979,7 @@ def test_save_api_node_response_to_fs(self, mocker):
)
remote_fs = Mock()

save_api_node_response_to_fs(nodes_path, remote_fs)
save_api_node_response_to_fs(nodes_path, remote_fs, False)

assert mock_write_api_response_to_fs.call_count == len(nodeIds)
assert mock_get_node_metadata_response.call_count == len(nodeIds)
Expand Down Expand Up @@ -1031,14 +1031,16 @@ def test_save_api_pipeline_response_to_fs(self, mocker):
mock_write_api_response_to_fs.assert_has_calls(expected_calls, any_order=True)

@pytest.mark.parametrize(
"file_path, protocol",
"file_path, protocol, is_all_previews_enabled",
[
("s3://shareableviz", "s3"),
("abfs://shareableviz", "abfs"),
("shareableviz", "file"),
("s3://shareableviz", "s3", True),
("abfs://shareableviz", "abfs", False),
("shareableviz", "file", True),
],
)
def test_save_api_responses_to_fs(self, file_path, protocol, mocker):
def test_save_api_responses_to_fs(
self, file_path, protocol, is_all_previews_enabled, mocker
):
mock_api_main_response_to_fs = mocker.patch(
"kedro_viz.api.rest.responses.save_api_main_response_to_fs"
)
Expand All @@ -1052,13 +1054,17 @@ def test_save_api_responses_to_fs(self, file_path, protocol, mocker):
mock_filesystem = mocker.patch("fsspec.filesystem")
mock_filesystem.return_value.protocol = protocol

save_api_responses_to_fs(file_path, mock_filesystem.return_value)
save_api_responses_to_fs(
file_path, mock_filesystem.return_value, is_all_previews_enabled
)

mock_api_main_response_to_fs.assert_called_once_with(
f"{file_path}/api/main", mock_filesystem.return_value
)
mock_api_node_response_to_fs.assert_called_once_with(
f"{file_path}/api/nodes", mock_filesystem.return_value
f"{file_path}/api/nodes",
mock_filesystem.return_value,
is_all_previews_enabled,
)
mock_api_pipeline_response_to_fs.assert_called_once_with(
f"{file_path}/api/pipelines", mock_filesystem.return_value
Expand Down
2 changes: 1 addition & 1 deletion package/tests/test_integrations/test_azure_deployer.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def mock_file_system(mocker):


class TestAzureDeployer:
def test_deploy(self, endpoint, bucket_name, mocker):
def test_deploy(self, endpoint, bucket_name, mocker, mock_file_system):
deployer = AzureDeployer(endpoint, bucket_name)

mocker.patch.object(deployer, "_upload_api_responses")
Expand Down
6 changes: 4 additions & 2 deletions package/tests/test_integrations/test_base_deployer.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,11 @@ def test_upload_api_responses(self, mocker):
"kedro_viz.integrations.deployment.base_deployer.save_api_responses_to_fs"
)
build = ConcreteBaseDeployer()
build._upload_api_responses()
build._upload_api_responses(False)

save_api_responses_to_fs_mock.assert_called_once_with(build._path, build._fs)
save_api_responses_to_fs_mock.assert_called_once_with(
build._path, build._fs, False
)

def test_upload_static_files(self, mocker):
mocker.patch("fsspec.filesystem")
Expand Down
3 changes: 2 additions & 1 deletion package/tests/test_integrations/test_deployer_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@
("gcp", "http://mocked-url.com", "shareableviz", GCPDeployer),
],
)
def test_create_deployer(platform, endpoint, bucket_name, deployer_class):
def test_create_deployer(platform, endpoint, bucket_name, deployer_class, mocker):
mocker.patch("fsspec.filesystem")
deployer = DeployerFactory.create_deployer(platform, endpoint, bucket_name)
assert isinstance(deployer, deployer_class)
assert deployer._endpoint == endpoint
Expand Down
2 changes: 1 addition & 1 deletion package/tests/test_integrations/test_gcp_deployer.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ def mock_file_system(mocker):


class TestGCPDeployer:
def test_deploy(self, endpoint, bucket_name, mocker):
def test_deploy(self, endpoint, bucket_name, mocker, mock_file_system):
deployer = GCPDeployer(endpoint, bucket_name)

mocker.patch.object(deployer, "_upload_api_responses")
Expand Down
Loading
Loading