
fix CI tests
Wauplin committed Sep 18, 2023
1 parent 31d8510 commit 37a9609
Showing 3 changed files with 6 additions and 4 deletions.
6 changes: 4 additions & 2 deletions tests/test_cli.py
@@ -222,7 +222,9 @@ def test_upload_folder_mock(self, create_mock: Mock, upload_mock: Mock) -> None:
         )
         cmd.run()
 
-        create_mock.assert_called_once_with(repo_id="my-model", repo_type="model", exist_ok=True, private=True)
+        create_mock.assert_called_once_with(
+            repo_id="my-model", repo_type="model", exist_ok=True, private=True, space_sdk=None
+        )
         upload_mock.assert_called_once_with(
             folder_path=cache_dir,
             path_in_repo=".",
@@ -251,7 +253,7 @@ def test_upload_file_mock(self, create_mock: Mock, upload_mock: Mock) -> None:
         cmd.run()
 
         create_mock.assert_called_once_with(
-            repo_id="my-dataset", repo_type="dataset", exist_ok=True, private=False
+            repo_id="my-dataset", repo_type="dataset", exist_ok=True, private=False, space_sdk=None
         )
         upload_mock.assert_called_once_with(
             path_or_fileobj=str(file_path),
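Note: the updated expectations add `space_sdk=None`, which suggests the upload command now forwards a `space_sdk` argument to `create_repo` unconditionally (the parameter only matters for Space repos). A minimal sketch of the call the tests now expect, with placeholder values taken from the diff:

```python
from huggingface_hub import create_repo

# space_sdk selects the SDK for Space repos ("gradio", "streamlit",
# "docker", "static"); for model/dataset repos it is simply None.
create_repo(
    repo_id="my-model",   # placeholder repo id from the test
    repo_type="model",
    exist_ok=True,        # do not fail if the repo already exists
    private=True,
    space_sdk=None,       # no-op for non-Space repos
)
```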
2 changes: 1 addition & 1 deletion tests/test_inference_async_client.py
@@ -212,7 +212,7 @@ async def test_get_status_too_big_model() -> None:
 
 @pytest.mark.asyncio
 async def test_get_status_loaded_model() -> None:
-    model_status = await AsyncInferenceClient().get_model_status("bigcode/starcoder")
+    model_status = await AsyncInferenceClient().get_model_status("bigscience/bloom")
     assert model_status.loaded is True
     assert model_status.state == "Loaded"
     assert model_status.compute_type == "gpu"
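For context, `get_model_status` asks the Inference API whether a model is currently deployed; the test model was switched from `bigcode/starcoder` to `bigscience/bloom`, presumably because the former was no longer reported as loaded on a GPU. A minimal async sketch of the call under test:

```python
import asyncio

from huggingface_hub import AsyncInferenceClient

async def main() -> None:
    client = AsyncInferenceClient()
    status = await client.get_model_status("bigscience/bloom")
    # ModelStatus exposes loaded, state, compute_type and framework
    print(status.loaded, status.state, status.compute_type)

asyncio.run(main())
```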
2 changes: 1 addition & 1 deletion tests/test_inference_client.py
Expand Up @@ -519,7 +519,7 @@ def test_too_big_model(self) -> None:

def test_loaded_model(self) -> None:
client = InferenceClient()
model_status = client.get_model_status("bigcode/starcoder")
model_status = client.get_model_status("bigscience/bloom")
self.assertTrue(model_status.loaded)
self.assertEqual(model_status.state, "Loaded")
self.assertEqual(model_status.compute_type, "gpu")
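The synchronous client mirrors the async API one-to-one; the equivalent call, shown here as a sketch:

```python
from huggingface_hub import InferenceClient

status = InferenceClient().get_model_status("bigscience/bloom")
assert status.loaded is True
assert status.state == "Loaded"
```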
