Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Migrate Viz to use Pydantic V2 #1743

Merged
merged 10 commits into from
Feb 21, 2024
1 change: 1 addition & 0 deletions RELEASE.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ Please follow the established format:
## Major features and improvements

- Drop support for `python=3.8`. (#1747)
- Migrate Viz to use `pydantic>=2`. (#1743)

## Bug fixes and other changes

Expand Down
3 changes: 1 addition & 2 deletions package/features/steps/lower_requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
ipython==7.0.0
fastapi==0.73.0
fastapi==0.100.0
fsspec==2021.4
aiofiles==22.1.0
uvicorn[standard]==0.22.0
Expand All @@ -13,4 +13,3 @@ strawberry-graphql==0.192.0
networkx==2.5
orjson==3.9
secure==0.3.0
pydantic==1.10
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This seems different from the discussion in #1603. Just to confirm: are we dropping Pydantic v1 support altogether now?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, we are dropping support for Pydantic v1, as there are issues with the v1 API (mentioned above) and an open FastAPI PR addressing them: https://github.com/tiangolo/fastapi/pull/10223/files

74 changes: 37 additions & 37 deletions package/kedro_viz/api/rest/responses.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""`kedro_viz.api.rest.responses` defines REST response types."""

# pylint: disable=missing-class-docstring,invalid-name
import abc
import logging
Expand All @@ -10,7 +11,7 @@
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse, ORJSONResponse
from kedro.io.core import get_protocol_and_path
from pydantic import BaseModel
from pydantic import BaseModel, ConfigDict

from kedro_viz.api.rest.utils import get_package_version
from kedro_viz.data_access import data_access_manager
Expand All @@ -35,8 +36,7 @@ class APIErrorMessage(BaseModel):


class BaseAPIResponse(BaseModel, abc.ABC):
class Config:
orm_mode = True
model_config = ConfigDict(from_attributes=True)


class BaseGraphNodeAPIResponse(BaseAPIResponse):
Expand All @@ -47,14 +47,13 @@ class BaseGraphNodeAPIResponse(BaseAPIResponse):
type: str

# If a node is a ModularPipeline node, this value will be None, hence Optional.
modular_pipelines: Optional[List[str]]
modular_pipelines: Optional[List[str]] = None


class TaskNodeAPIResponse(BaseGraphNodeAPIResponse):
parameters: Dict

class Config:
schema_extra = {
model_config = ConfigDict(
json_schema_extra={
"example": {
"id": "6ab908b8",
"name": "split_data_node",
Expand All @@ -78,15 +77,15 @@ class Config:
},
}
}
)


class DataNodeAPIResponse(BaseGraphNodeAPIResponse):
layer: Optional[str]
dataset_type: Optional[str]
stats: Optional[Dict]

class Config:
schema_extra = {
layer: Optional[str] = None
dataset_type: Optional[str] = None
stats: Optional[Dict] = None
model_config = ConfigDict(
json_schema_extra={
"example": {
"id": "d7b83b05",
"name": "master_table",
Expand All @@ -99,6 +98,7 @@ class Config:
"stats": {"rows": 10, "columns": 2, "file_size": 2300},
}
}
)


NodeAPIResponse = Union[
Expand All @@ -108,15 +108,14 @@ class Config:


class TaskNodeMetadataAPIResponse(BaseAPIResponse):
code: Optional[str]
filepath: Optional[str]
parameters: Optional[Dict]
code: Optional[str] = None
filepath: Optional[str] = None
parameters: Optional[Dict] = None
inputs: List[str]
outputs: List[str]
run_command: Optional[str]

class Config:
schema_extra = {
run_command: Optional[str] = None
model_config = ConfigDict(
json_schema_extra={
"example": {
"code": "def split_data(data: pd.DataFrame, parameters: Dict) -> Tuple:",
"filepath": "proj/src/new_kedro_project/pipelines/data_science/nodes.py",
Expand All @@ -126,39 +125,39 @@ class Config:
"run_command": "kedro run --to-nodes=split_data",
}
}
)


class DataNodeMetadataAPIResponse(BaseAPIResponse):
filepath: Optional[str]
filepath: Optional[str] = None
type: str
run_command: Optional[str]
preview: Optional[Union[Dict, str]]
preview_type: Optional[str]
stats: Optional[Dict]

class Config:
schema_extra = {
run_command: Optional[str] = None
preview: Optional[Union[Dict, str]] = None
preview_type: Optional[str] = None
stats: Optional[Dict] = None
model_config = ConfigDict(
json_schema_extra={
"example": {
"filepath": "/my-kedro-project/data/03_primary/master_table.csv",
"type": "kedro_datasets.pandas.csv_dataset.CSVDataset",
"run_command": "kedro run --to-outputs=master_table",
}
}
)


class TranscodedDataNodeMetadataAPIReponse(BaseAPIResponse):
filepath: str
original_type: str
transcoded_types: List[str]
run_command: Optional[str]
stats: Optional[Dict]
run_command: Optional[str] = None
stats: Optional[Dict] = None


class ParametersNodeMetadataAPIResponse(BaseAPIResponse):
parameters: Dict

class Config:
schema_extra = {
model_config = ConfigDict(
json_schema_extra={
"example": {
"parameters": {
"test_size": 0.2,
Expand All @@ -176,6 +175,7 @@ class Config:
}
}
}
)


NodeMetadataAPIResponse = Union[
Expand All @@ -197,7 +197,7 @@ class NamedEntityAPIResponse(BaseAPIResponse):
"""

id: str
name: Optional[str]
name: Optional[str] = None


class ModularPipelineChildAPIResponse(BaseAPIResponse):
Expand Down Expand Up @@ -266,15 +266,15 @@ class PackageCompatibilityAPIResponse(BaseAPIResponse):
package_name: str
package_version: str
is_compatible: bool

class Config:
schema_extra = {
model_config = ConfigDict(
json_schema_extra={
"example": {
"package_name": "fsspec",
"package_version": "2023.9.1",
"is_compatible": True,
}
}
)


class EnhancedORJSONResponse(ORJSONResponse):
Expand Down
1 change: 1 addition & 0 deletions package/kedro_viz/integrations/kedro/data_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
load data from a Kedro project. It takes care of making sure viz can
load data from projects created in a range of Kedro versions.
"""

# pylint: disable=import-outside-toplevel, protected-access

import json
Expand Down
11 changes: 4 additions & 7 deletions package/kedro_viz/models/experiment_tracking.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
"""kedro_viz.models.experiment_tracking` defines data models to represent run data and
tracking datasets."""
# pylint: disable=too-few-public-methods,protected-access,missing-class-docstring,missing-function-docstring
# pylint: disable=too-few-public-methods,protected-access,missing-function-docstring
import logging
from dataclasses import dataclass, field
from enum import Enum
from typing import TYPE_CHECKING, Any, Dict

from kedro.io import Version
from pydantic import ConfigDict
from sqlalchemy import Column
from sqlalchemy.orm import declarative_base # type: ignore
from sqlalchemy.sql.schema import ForeignKey
Expand Down Expand Up @@ -35,9 +36,7 @@ class RunModel(Base): # type: ignore

id = Column(String, primary_key=True, index=True)
blob = Column(JSON)

class Config:
orm_mode = True
model_config = ConfigDict(from_attributes=True)


class UserRunDetailsModel(Base): # type: ignore
Expand All @@ -50,9 +49,7 @@ class UserRunDetailsModel(Base): # type: ignore
bookmark = Column(Boolean, default=False)
title = Column(String)
notes = Column(String)

class Config:
orm_mode = True
model_config = ConfigDict(from_attributes=True)


class TrackingDatasetGroup(str, Enum):
Expand Down
Loading