Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 22 additions & 8 deletions language/automl/dataset_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,15 +35,7 @@ def test_dataset_create_import_delete(capsys):
out, _ = capsys.readouterr()
create_dataset_output = out.splitlines()
assert "Dataset id: " in create_dataset_output[1]

# import data
dataset_id = create_dataset_output[1].split()[2]
data = "gs://{}-lcm/happiness.csv".format(project_id)
automl_natural_language_dataset.import_data(
project_id, compute_region, dataset_id, data
)
out, _ = capsys.readouterr()
assert "Data imported." in out

# delete dataset
automl_natural_language_dataset.delete_dataset(
Expand All @@ -53,6 +45,28 @@ def test_dataset_create_import_delete(capsys):
assert "Dataset deleted." in out


def test_import_data(capsys):
    # Importing a dataset can take a long time, and only four operations can
    # run on a dataset at once — so instead of a real import, try to import
    # into a nonexistent dataset and confirm that the dataset was not found,
    # while the other elements of the request were valid.
    try:
        data = "gs://{}-lcm/happiness.csv".format(project_id)
        automl_natural_language_dataset.import_data(
            project_id, compute_region, "TEN0000000000000000000", data
        )
        # Some client versions report the failure on stdout rather than
        # raising, so check the captured output here.
        out, _ = capsys.readouterr()
        assert (
            "Dataset doesn't exist or is inaccessible for use with AutoMl."
            in out
        )
    except Exception as e:
        # BUG FIX: `e.message` was removed in Python 3 (and deprecated in
        # Python 2.6+); use str(e), which works on both, so the handler
        # doesn't raise AttributeError instead of asserting.
        assert (
            "Dataset doesn't exist or is inaccessible for use with AutoMl."
            in str(e)
        )


def test_dataset_list_get(capsys):
# list datasets
automl_natural_language_dataset.list_datasets(
Expand Down