Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bug/Fix columnar batch validation function #768

Merged
merged 2 commits into from
Oct 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 11 additions & 11 deletions src/power_grid_model/validation/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,26 +131,26 @@ def validate_batch_data(
Raises:
Error: KeyError | TypeError | ValueError: if the data structure is invalid.
"""
assert_valid_data_structure(input_data, DatasetType.input)
# Convert to row based if in columnar or mixed format
row_input_data = compatibility_convert_row_columnar_dataset(input_data, None, DatasetType.input)

input_errors: list[ValidationError] = list(validate_unique_ids_across_components(input_data))
# A deep copy is made of the input data, since default values will be added in the validation process
input_data_copy = copy.deepcopy(row_input_data)
assert_valid_data_structure(input_data_copy, DatasetType.input)

# Convert to row based if in columnar format
# TODO(figueroa1395): transform to columnar per single batch scenario once the columnar dataset python extension
# is finished
row_update_data = compatibility_convert_row_columnar_dataset(update_data, None, DatasetType.update)
input_errors: list[ValidationError] = list(validate_unique_ids_across_components(input_data_copy))

# Splitting update_data_into_batches may raise TypeErrors and ValueErrors
batch_data = convert_batch_dataset_to_batch_list(row_update_data)
batch_data = convert_batch_dataset_to_batch_list(update_data, DatasetType.update)

errors = {}
for batch, batch_update_data in enumerate(batch_data):
assert_valid_data_structure(batch_update_data, DatasetType.update)
id_errors: list[ValidationError] = list(validate_ids_exist(batch_update_data, input_data))
row_update_data = compatibility_convert_row_columnar_dataset(batch_update_data, None, DatasetType.update)
assert_valid_data_structure(row_update_data, DatasetType.update)
id_errors: list[ValidationError] = list(validate_ids_exist(row_update_data, input_data_copy))

batch_errors = input_errors + id_errors
if not id_errors:
merged_data = update_input_data(input_data, batch_update_data)
merged_data = update_input_data(input_data_copy, row_update_data)
batch_errors += validate_required_values(merged_data, calculation_type, symmetric)
batch_errors += validate_values(merged_data, calculation_type)

Expand Down
33 changes: 29 additions & 4 deletions tests/unit/validation/test_batch_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,14 @@
import pytest

from power_grid_model import DatasetType, LoadGenType, initialize_array
from power_grid_model._utils import compatibility_convert_row_columnar_dataset
from power_grid_model._utils import compatibility_convert_row_columnar_dataset, is_columnar
from power_grid_model.enum import ComponentAttributeFilterOptions
from power_grid_model.validation import validate_batch_data
from power_grid_model.validation.errors import MultiComponentNotUniqueError, NotBooleanError


@pytest.fixture
def input_data() -> dict[str, np.ndarray]:
def original_input_data() -> dict[str, np.ndarray]:
node = initialize_array("input", "node", 4)
node["id"] = [1, 2, 3, 4]
node["u_rated"] = 10.5e3
Expand Down Expand Up @@ -42,6 +42,27 @@ def input_data() -> dict[str, np.ndarray]:
return {"node": node, "line": line, "asym_load": asym_load}


@pytest.fixture
def original_input_data_columnar_all(original_input_data):
    """Return the row-based input data converted to columnar form, keeping all attributes."""
    converted = compatibility_convert_row_columnar_dataset(
        original_input_data,
        ComponentAttributeFilterOptions.ALL,
        DatasetType.input,
    )
    return converted


@pytest.fixture
def original_input_data_columnar_relevant(original_input_data):
    """Return the row-based input data converted to columnar form, keeping only relevant attributes."""
    converted = compatibility_convert_row_columnar_dataset(
        original_input_data,
        ComponentAttributeFilterOptions.RELEVANT,
        DatasetType.input,
    )
    return converted


@pytest.fixture(
    params=[
        "original_input_data",
        "original_input_data_columnar_all",
        "original_input_data_columnar_relevant",
    ]
)
def input_data(request):
    """Parametrized fixture yielding the same input dataset in each supported layout.

    Covers the row-based layout plus both columnar variants (all attributes and
    relevant attributes only), so every dependent test runs against all three.
    """
    return request.getfixturevalue(request.param)


@pytest.fixture
def original_batch_data() -> dict[str, np.ndarray]:
line = initialize_array("update", "line", (3, 2))
Expand Down Expand Up @@ -82,8 +103,12 @@ def test_validate_batch_data(input_data, batch_data):


def test_validate_batch_data_input_error(input_data, batch_data):
input_data["node"][-1]["id"] = 123
input_data["line"][-1]["id"] = 123
if is_columnar(input_data):
input_data["node"]["id"][-1] = 123
input_data["line"]["id"][-1] = 123
else:
input_data["node"][-1]["id"] = 123
input_data["line"][-1]["id"] = 123
errors = validate_batch_data(input_data, batch_data)
assert len(errors) == 3
assert [MultiComponentNotUniqueError([("line", "id"), ("node", "id")], [("line", 123), ("node", 123)])] == errors[0]
Expand Down
Loading