Update tests after merge
zhiltsov-max committed Nov 15, 2024
1 parent 037b07d commit 586704c
Showing 2 changed files with 33 additions and 22 deletions.
15 changes: 8 additions & 7 deletions tests/python/rest_api/test_projects.py
@@ -1007,14 +1007,15 @@ def test_cant_import_annotations_as_project(self, admin_user, tasks, format_name
 
         def _export_task(task_id: int, format_name: str) -> io.BytesIO:
             with make_api_client(admin_user) as api_client:
-                response = export_dataset(
-                    api_client.tasks_api.retrieve_annotations_endpoint,
-                    id=task_id,
-                    format=format_name,
+                return io.BytesIO(
+                    export_dataset(
+                        api_client.tasks_api,
+                        api_version=2,
+                        id=task_id,
+                        format=format_name,
+                        save_images=False,
+                    )
                 )
-                assert response.status == HTTPStatus.OK
-
-                return io.BytesIO(response.data)
 
         if format_name in list(DATUMARO_FORMAT_FOR_DIMENSION.values()):
             with zipfile.ZipFile(_export_task(task["id"], format_name)) as zip_file:
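
Both files converge on the same caller-side pattern: instead of invoking export_dataset with a low-level *_endpoint object and then checking response.status and wrapping response.data by hand, the tests now pass the high-level tasks_api together with api_version=2 and save_images=False and treat the helper's return value as the raw archive bytes. A minimal sketch of that pattern as a standalone function follows; it assumes export_dataset is the shared helper from the test suite's utils module (not shown in this diff) and that it returns the exported archive as bytes. The function name read_exported_annotations and the import path are illustrative only.

import io
import json
import zipfile

# Assumed import: the shared helper lives in the test suite's utils module
# (tests/python/rest_api/utils.py); the exact import path may differ.
from rest_api.utils import export_dataset


def read_exported_annotations(tasks_api, task_id: int, format_name: str) -> dict:
    # The helper is assumed to drive the api_version=2 export flow internally
    # and return the finished dataset archive as raw bytes.
    dataset_bytes = export_dataset(
        tasks_api,
        api_version=2,
        id=task_id,
        format=format_name,
        save_images=False,
    )
    # Datumaro archives exported by these tests keep their annotations
    # in annotations/default.json.
    with zipfile.ZipFile(io.BytesIO(dataset_bytes)) as zip_file:
        return json.loads(zip_file.read("annotations/default.json"))
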
40 changes: 25 additions & 15 deletions tests/python/rest_api/test_tasks.py
@@ -992,14 +992,17 @@ def test_datumaro_export_without_annotations_includes_image_info(
         )
 
         with make_api_client(admin_user) as api_client:
-            response = export_dataset(
-                api_client.tasks_api.retrieve_annotations_endpoint,
-                id=task["id"],
-                format=DATUMARO_FORMAT_FOR_DIMENSION[dimension],
+            dataset_file = io.BytesIO(
+                export_dataset(
+                    api_client.tasks_api,
+                    api_version=2,
+                    id=task["id"],
+                    format=DATUMARO_FORMAT_FOR_DIMENSION[dimension],
+                    save_images=False,
+                )
             )
-            assert response.status == HTTPStatus.OK
 
-            with zipfile.ZipFile(io.BytesIO(response.data)) as zip_file:
+            with zipfile.ZipFile(dataset_file) as zip_file:
                 annotations = json.loads(zip_file.read("annotations/default.json"))
 
         assert annotations["items"]
@@ -5232,20 +5235,27 @@ def test_import_annotations_after_deleting_related_cloud_storage(
 
     @pytest.mark.parametrize("dimension", ["2d", "3d"])
     def test_can_import_datumaro_json(self, admin_user, tasks, dimension):
-        task = next(t for t in tasks if t.get("size") if t["dimension"] == dimension)
+        task = next(
+            t
+            for t in tasks
+            if t.get("size")
+            if t["dimension"] == dimension and t.get("validation_mode") != "gt_pool"
+        )
 
         with make_api_client(admin_user) as api_client:
             original_annotations = json.loads(
-                api_client.tasks_api.retrieve_annotations(task["id"], _parse_response=True)[1].data
+                api_client.tasks_api.retrieve_annotations(task["id"])[1].data
             )
 
-            response = export_dataset(
-                api_client.tasks_api.retrieve_annotations_endpoint,
-                id=task["id"],
-                format=DATUMARO_FORMAT_FOR_DIMENSION[dimension],
+            dataset_archive = io.BytesIO(
+                export_dataset(
+                    api_client.tasks_api,
+                    api_version=2,
+                    id=task["id"],
+                    format=DATUMARO_FORMAT_FOR_DIMENSION[dimension],
+                    save_images=False,
+                )
             )
-            assert response.status == HTTPStatus.OK
-            dataset_archive = io.BytesIO(response.data)
 
             with zipfile.ZipFile(dataset_archive) as zip_file:
                 annotations = zip_file.read("annotations/default.json")
@@ -5259,7 +5269,7 @@ def test_can_import_datumaro_json(self, admin_user, tasks, dimension):
 
         with make_api_client(admin_user) as api_client:
             updated_annotations = json.loads(
-                api_client.tasks_api.retrieve_annotations(task["id"], _parse_response=True)[1].data
+                api_client.tasks_api.retrieve_annotations(task["id"])[1].data
             )
 
         assert compare_annotations(original_annotations, updated_annotations) == {}
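
For the retrieve_annotations change in the last two hunks: the generated client method is assumed to return a pair of (deserialized result, raw HTTP response), so indexing with [1].data reads the raw JSON body, which the test then parses with json.loads. A small sketch of that access pattern, reusing make_api_client, admin_user, and task from the tests above:

import json

with make_api_client(admin_user) as api_client:
    # Assumption: the call returns (parsed result, raw HTTP response);
    # response.data holds the raw JSON bytes of the annotations.
    _, response = api_client.tasks_api.retrieve_annotations(task["id"])
    annotations = json.loads(response.data)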
