diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py
index f40904c2057e..1bd46e407968 100644
--- a/bigquery/google/cloud/bigquery/client.py
+++ b/bigquery/google/cloud/bigquery/client.py
@@ -185,8 +185,6 @@ def create_dataset(self, dataset):
         :rtype: ":class:`~google.cloud.bigquery.dataset.Dataset`"
         :returns: a new ``Dataset`` returned from the service.
         """
-        if dataset.project is None:
-            dataset._project = self.project
         path = '/projects/%s/datasets' % (dataset.project,)
         api_response = self._connection.api_request(
             method='POST', path=path, data=dataset._build_resource())
@@ -244,8 +242,6 @@ def update_dataset(self, dataset, fields):
         :rtype: :class:`google.cloud.bigquery.dataset.Dataset`
         :returns: the modified ``Dataset`` instance
         """
-        if dataset.project is None:
-            dataset._project = self.project
         path = '/projects/%s/datasets/%s' % (dataset.project,
                                              dataset.dataset_id)
         partial = {}
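With the project fallback removed from `create_dataset()` and `update_dataset()`, the dataset object must already carry its project. A minimal sketch of the intended call pattern under this change ('my-project' and 'my_dataset' are placeholders, and a credentialed client plus `Client.dataset()` returning a `DatasetReference` are assumed, as the tests below exercise):

```python
from google.cloud import bigquery
from google.cloud.bigquery.dataset import Dataset

client = bigquery.Client(project='my-project')

# client.dataset() builds a DatasetReference that already carries the
# client's project, so create_dataset() no longer patches dataset._project.
dataset = client.create_dataset(Dataset(client.dataset('my_dataset')))
```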
diff --git a/bigquery/google/cloud/bigquery/dataset.py b/bigquery/google/cloud/bigquery/dataset.py
index 105772261449..fc641c3a4d58 100644
--- a/bigquery/google/cloud/bigquery/dataset.py
+++ b/bigquery/google/cloud/bigquery/dataset.py
@@ -106,6 +106,10 @@ class DatasetReference(object):
     """

     def __init__(self, project, dataset_id):
+        if not isinstance(project, six.string_types):
+            raise ValueError("Pass a string for project")
+        if not isinstance(dataset_id, six.string_types):
+            raise ValueError("Pass a string for dataset_id")
         self._project = project
         self._dataset_id = dataset_id

@@ -154,27 +158,15 @@ class Dataset(object):
     See
     https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets

-    :type dataset_id: str
-    :param dataset_id: the ID of the dataset
-
-    :type access_entries: list of :class:`AccessEntry`
-    :param access_entries: roles granted to entities for this dataset
-
-    :type project: str
-    :param project: (Optional) project ID for the dataset.
+    :type dataset_ref: :class:`~google.cloud.bigquery.dataset.DatasetReference`
+    :param dataset_ref: a pointer to a dataset
     """

-    _access_entries = None
-
-    def __init__(self,
-                 dataset_id,
-                 access_entries=(),
-                 project=None):
-        self._dataset_id = dataset_id
+    def __init__(self, dataset_ref):
+        self._project = dataset_ref.project
+        self._dataset_id = dataset_ref.dataset_id
         self._properties = {'labels': {}}
-        # Let the @property do validation.
-        self.access_entries = access_entries
-        self._project = project
+        self._access_entries = ()

     @property
     def project(self):
@@ -406,7 +398,7 @@ def from_api_repr(cls, resource):
             raise KeyError('Resource lacks required identity information:'
                            '["datasetReference"]["datasetId"]')
         dataset_id = dsr['datasetId']
-        dataset = cls(dataset_id, project=dsr['projectId'])
+        dataset = cls(DatasetReference(dsr['projectId'], dataset_id))
        dataset._set_properties(resource)
        return dataset

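The new constructor contract can be exercised locally with no API calls. A short sketch of what the hunks above imply (ids are placeholders):

```python
from google.cloud.bigquery.dataset import Dataset, DatasetReference

ref = DatasetReference('my-project', 'my_dataset')  # both must be strings now
dataset = Dataset(ref)
assert dataset.project == 'my-project'
assert dataset.dataset_id == 'my_dataset'

# Non-string arguments are rejected up front by the new isinstance checks:
try:
    DatasetReference(12345, 'my_dataset')
except ValueError:
    pass  # "Pass a string for project"
```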
diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py
index 42f12ac39838..84cca80e22a0 100644
--- a/bigquery/google/cloud/bigquery/job.py
+++ b/bigquery/google/cloud/bigquery/job.py
@@ -842,8 +842,9 @@ def from_api_repr(cls, resource, client):
         """
         job_id, config = cls._get_resource_config(resource)
         dest_config = config['destinationTable']
-        dataset = Dataset(dest_config['datasetId'],
-                          project=dest_config['projectId'])
+        ds_ref = DatasetReference(dest_config['projectId'],
+                                  dest_config['datasetId'],)
+        dataset = Dataset(ds_ref)
         table_ref = TableReference(dataset, dest_config['tableId'])
         destination = Table(table_ref, client=client)
         source_urls = config.get('sourceUris', ())
@@ -959,8 +960,9 @@ def from_api_repr(cls, resource, client):
         """
         job_id, config = cls._get_resource_config(resource)
         dest_config = config['destinationTable']
-        dataset = Dataset(dest_config['datasetId'],
-                          project=dest_config['projectId'])
+        ds_ref = DatasetReference(dest_config['projectId'],
+                                  dest_config['datasetId'],)
+        dataset = Dataset(ds_ref)
         table_ref = TableReference(dataset, dest_config['tableId'])
         destination = Table(table_ref, client=client)
         sources = []
@@ -972,9 +974,9 @@ def from_api_repr(cls, resource, client):
                     "Resource missing 'sourceTables' / 'sourceTable'")
             source_configs = [single]
         for source_config in source_configs:
-            dataset = Dataset(source_config['datasetId'],
-                              project=source_config['projectId'])
-            table_ref = TableReference(dataset, source_config['tableId'])
+            ds_ref = DatasetReference(source_config['projectId'],
+                                      source_config['datasetId'])
+            table_ref = ds_ref.table(source_config['tableId'])
             sources.append(Table(table_ref, client=client))
         job = cls(job_id, destination, sources, client=client)
         job._set_properties(resource)
@@ -1426,7 +1428,8 @@ def _copy_configuration_properties(self, configuration):
         dest_local = self._destination_table_resource()
         if dest_remote != dest_local:
             project = dest_remote['projectId']
-            dataset = Dataset(dest_remote['datasetId'], project=project)
+            dataset = Dataset(DatasetReference(project,
+                                               dest_remote['datasetId']))
             self.destination = dataset.table(dest_remote['tableId'])

         def_ds = configuration.get('defaultDataset')
@@ -1434,9 +1437,8 @@ def _copy_configuration_properties(self, configuration):
             if self.default_dataset is not None:
                 del self.default_dataset
         else:
-            self.default_dataset = Dataset(def_ds['datasetId'],
-                                           project=def_ds['projectId'])
-
+            self.default_dataset = Dataset(
+                DatasetReference(def_ds['projectId'], def_ds['datasetId']))
         udf_resources = []
         for udf_mapping in configuration.get(self._UDF_KEY, ()):
             key_val, = udf_mapping.items()
@@ -1587,11 +1589,11 @@ def referenced_tables(self):
             t_project = table['projectId']

-            ds_name = table['datasetId']
-            t_dataset = datasets_by_project_name.get((t_project, ds_name))
+            ds_id = table['datasetId']
+            t_dataset = datasets_by_project_name.get((t_project, ds_id))
             if t_dataset is None:
-                t_dataset = Dataset(ds_name, project=t_project)
-                datasets_by_project_name[(t_project, ds_name)] = t_dataset
+                t_dataset = DatasetReference(t_project, ds_id)
+                datasets_by_project_name[(t_project, ds_id)] = t_dataset

             t_name = table['tableId']
             tables.append(t_dataset.table(t_name))

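The `from_api_repr()` hunks all follow the same mapping from a table sub-resource to a `TableReference`. A local sketch of that mapping (the dict values are placeholders standing in for what the service returns):

```python
from google.cloud.bigquery.dataset import DatasetReference

# Shape of the 'destinationTable' / 'sourceTables' sub-resources handled above.
dest_config = {
    'projectId': 'my-project',
    'datasetId': 'my_dataset',
    'tableId': 'my_table',
}
ds_ref = DatasetReference(dest_config['projectId'], dest_config['datasetId'])
table_ref = ds_ref.table(dest_config['tableId'])  # TableReference, no API call
```

Building references this way keeps `referenced_tables()` free of full `Dataset` objects where only the identity triple is needed.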
diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py
index ab579dc31aa9..f72954691018 100644
--- a/bigquery/tests/system.py
+++ b/bigquery/tests/system.py
@@ -112,8 +112,7 @@ def _still_in_use(bad_request):

     def test_create_dataset(self):
         DATASET_ID = _make_dataset_id('create_dataset')
-        dataset = retry_403(Config.CLIENT.create_dataset)(Dataset(DATASET_ID))
-        self.to_delete.append(dataset)
+        dataset = self.temp_dataset(DATASET_ID)

         self.assertTrue(_dataset_exists(dataset))
         self.assertEqual(dataset.dataset_id, DATASET_ID)
@@ -122,7 +121,7 @@
     def test_get_dataset(self):
         DATASET_ID = _make_dataset_id('get_dataset')
         client = Config.CLIENT
-        dataset_arg = Dataset(DATASET_ID, project=client.project)
+        dataset_arg = Dataset(client.dataset(DATASET_ID))
         dataset_arg.friendly_name = 'Friendly'
         dataset_arg.description = 'Description'
         dataset = retry_403(client.create_dataset)(dataset_arg)
@@ -135,10 +134,7 @@ def test_get_dataset(self):
         self.assertEqual(got.description, 'Description')

     def test_update_dataset(self):
-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('update_dataset')))
-        self.to_delete.append(dataset)
-
+        dataset = self.temp_dataset(_make_dataset_id('update_dataset'))
         self.assertTrue(_dataset_exists(dataset))
         self.assertIsNone(dataset.friendly_name)
         self.assertIsNone(dataset.description)
@@ -163,6 +159,7 @@ def test_update_dataset(self):
         self.assertEqual(ds3.labels, {'color': 'green', 'shape': 'circle'})
         # TODO(jba): test that read-modify-write with ETag works.
+
     def test_list_datasets(self):
         datasets_to_create = [
             'new' + unique_resource_id(),
@@ -170,9 +167,7 @@ def test_list_datasets(self):
             'newer' + unique_resource_id(),
             'newest' + unique_resource_id(),
         ]
         for dataset_id in datasets_to_create:
-            created_dataset = retry_403(Config.CLIENT.create_dataset)(
-                Dataset(dataset_id))
-            self.to_delete.append(created_dataset)
+            self.temp_dataset(dataset_id)

         # Retrieve the datasets.
         iterator = Config.CLIENT.list_datasets()
@@ -184,9 +179,7 @@ def test_list_datasets(self):
         self.assertEqual(len(created), len(datasets_to_create))

     def test_create_table(self):
-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('create_table')))
-        self.to_delete.append(dataset)
+        dataset = self.temp_dataset(_make_dataset_id('create_table'))

         TABLE_NAME = 'test_table'
         full_name = bigquery.SchemaField('full_name', 'STRING',
@@ -217,9 +210,7 @@ def test_get_table_w_public_dataset(self):

     def test_list_dataset_tables(self):
         DATASET_ID = _make_dataset_id('list_tables')
-        dataset = retry_403(Config.CLIENT.create_dataset)(Dataset(DATASET_ID))
-        self.to_delete.append(dataset)
-
+        dataset = self.temp_dataset(DATASET_ID)
         # Retrieve tables before any are created for the dataset.
         iterator = Config.CLIENT.list_dataset_tables(dataset)
         all_tables = list(iterator)
@@ -252,9 +243,7 @@ def test_list_dataset_tables(self):
         self.assertEqual(len(created), len(tables_to_create))

     def test_patch_table(self):
-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('patch_table')))
-        self.to_delete.append(dataset)
+        dataset = self.temp_dataset(_make_dataset_id('patch_table'))

         TABLE_NAME = 'test_table'
         full_name = bigquery.SchemaField('full_name', 'STRING',
@@ -273,9 +262,7 @@ def test_patch_table(self):
         self.assertEqual(table.description, 'Description')

     def test_update_table(self):
-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('update_table')))
-        self.to_delete.append(dataset)
+        dataset = self.temp_dataset(_make_dataset_id('update_table'))

         TABLE_NAME = 'test_table'
         full_name = bigquery.SchemaField('full_name', 'STRING',
@@ -316,10 +303,7 @@ def test_insert_data_then_dump_table(self):
         ]
         ROW_IDS = range(len(ROWS))

-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('insert_data_then_dump')))
-        self.to_delete.append(dataset)
-
+        dataset = self.temp_dataset(_make_dataset_id('insert_data_then_dump'))
         TABLE_NAME = 'test_table'
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
@@ -358,10 +342,7 @@ def test_load_table_from_local_file_then_dump_table(self):
         ]
         TABLE_NAME = 'test_table'

-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('load_local_then_dump')))
-        self.to_delete.append(dataset)
-
+        dataset = self.temp_dataset(_make_dataset_id('load_local_then_dump'))
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
@@ -406,10 +387,7 @@ def test_load_table_from_local_avro_file_then_dump_table(self):
                 ("orange", 590),
                 ("red", 650)]

-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('load_local_then_dump')))
-        self.to_delete.append(dataset)
-
+        dataset = self.temp_dataset(_make_dataset_id('load_local_then_dump'))
         table = Table(dataset.table(TABLE_NAME), client=Config.CLIENT)
         self.to_delete.insert(0, table)

@@ -467,9 +445,7 @@ def test_load_table_from_storage_then_dump_table(self):

         self.to_delete.insert(0, blob)

-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('load_gcs_then_dump')))
-        self.to_delete.append(dataset)
+        dataset = self.temp_dataset(_make_dataset_id('load_gcs_then_dump'))

         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
@@ -536,10 +512,7 @@ def test_load_table_from_storage_w_autodetect_schema(self):

         self.to_delete.insert(0, blob)

-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('load_gcs_then_dump')))
-        self.to_delete.append(dataset)
-
+        dataset = self.temp_dataset(_make_dataset_id('load_gcs_then_dump'))
         table_ref = dataset.table(table_name)

         job = Config.CLIENT.load_table_from_storage(
@@ -589,9 +562,7 @@ def _load_table_for_extract_table(
             blob.upload_from_file(csv_read, content_type='text/csv')
         self.to_delete.insert(0, blob)

-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(table.dataset_id))
-        self.to_delete.append(dataset)
+        dataset = self.temp_dataset(table.dataset_id)
         table_ref = dataset.table(table.table_id)
         job = Config.CLIENT.load_table_from_storage(
             'bq_extract_storage_test_' + local_id, table_ref, gs_url)
@@ -676,8 +647,7 @@ def test_job_cancel(self):
         TABLE_NAME = 'test_table'
         QUERY = 'SELECT * FROM %s.%s' % (DATASET_ID, TABLE_NAME)

-        dataset = retry_403(Config.CLIENT.create_dataset)(Dataset(DATASET_ID))
-        self.to_delete.append(dataset)
+        dataset = self.temp_dataset(DATASET_ID)

         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
@@ -866,9 +836,7 @@ def test_dbapi_fetchall(self):

     def _load_table_for_dml(self, rows, dataset_id, table_id):
         from google.cloud._testing import _NamedTemporaryFile
-        dataset = retry_403(Config.CLIENT.create_dataset)(Dataset(dataset_id))
-        self.to_delete.append(dataset)
-
+        dataset = self.temp_dataset(dataset_id)
         greeting = bigquery.SchemaField(
             'greeting', 'STRING', mode='NULLABLE')
         table = Table(dataset.table(table_id), schema=[greeting],
@@ -1190,8 +1158,7 @@ def test_dump_table_w_public_data(self):
         DATASET_ID = 'samples'
         TABLE_NAME = 'natality'

-        dataset = Dataset(DATASET_ID, project=PUBLIC)
-        table_ref = dataset.table(TABLE_NAME)
+        table_ref = DatasetReference(PUBLIC, DATASET_ID).table(TABLE_NAME)
         table = Config.CLIENT.get_table(table_ref)
         self._fetch_single_page(table)

@@ -1242,10 +1209,7 @@ def test_insert_nested_nested(self):
             ('Some value', record)
         ]
         table_name = 'test_table'
-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('issue_2951')))
-        self.to_delete.append(dataset)
-
+        dataset = self.temp_dataset(_make_dataset_id('issue_2951'))
         table = Table(dataset.table(table_name), schema=schema,
                       client=Config.CLIENT)
         table.create()
@@ -1260,10 +1224,8 @@ def test_insert_nested_nested(self):

     def test_create_table_insert_fetch_nested_schema(self):
         table_name = 'test_table'
-        dataset = retry_403(Config.CLIENT.create_dataset)(
-            Dataset(_make_dataset_id('create_table_nested_schema')))
-        self.to_delete.append(dataset)
-
+        dataset = self.temp_dataset(
+            _make_dataset_id('create_table_nested_schema'))
         schema = _load_json_schema()
         table = Table(dataset.table(table_name), schema=schema,
                       client=Config.CLIENT)
@@ -1321,6 +1283,12 @@ def test_create_table_insert_fetch_nested_schema(self):
             e_favtime = datetime.datetime(*parts[0:6])
             self.assertEqual(found[7], e_favtime)  # FavoriteTime

+    def temp_dataset(self, dataset_id):
+        dataset = retry_403(Config.CLIENT.create_dataset)(
+            Dataset(Config.CLIENT.dataset(dataset_id)))
+        self.to_delete.append(dataset)
+        return dataset
+

 def _job_done(instance):
     return instance.state.lower() == 'done'
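Every system test previously repeated the same create-retry-track boilerplate; it now collapses into the `temp_dataset()` helper added at the bottom of the class. A standalone analogue of the same pattern (hypothetical `client` and `cleanup` names; the tests' `retry_403` wrapper is omitted here for brevity):

```python
from google.cloud.bigquery.dataset import Dataset

def temp_dataset(client, cleanup, dataset_id):
    """Create a dataset and register it for teardown."""
    dataset = client.create_dataset(Dataset(client.dataset(dataset_id)))
    cleanup.append(dataset)  # drained in tearDown, as in the tests above
    return dataset
```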
diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py
index ec12be72efae..50c324ebfc32 100644
--- a/bigquery/tests/unit/test_client.py
+++ b/bigquery/tests/unit/test_client.py
@@ -313,7 +313,7 @@ def test_create_dataset_minimal(self):
         creds = _make_credentials()
         client = self._make_one(project=PROJECT, credentials=creds)
         conn = client._connection = _Connection(RESOURCE)
-        ds = client.create_dataset(Dataset(DS_ID))
+        ds = client.create_dataset(Dataset(client.dataset(DS_ID)))
         self.assertEqual(len(conn._requested), 1)
         req = conn._requested[0]
         self.assertEqual(req['method'], 'POST')
@@ -364,7 +364,8 @@ def test_create_dataset_w_attrs(self):
         conn = client._connection = _Connection(RESOURCE)
         entries = [AccessEntry('OWNER', 'userByEmail', USER_EMAIL),
                    AccessEntry(None, 'view', VIEW)]
-        ds_arg = Dataset(DS_ID, project=PROJECT, access_entries=entries)
+        ds_arg = Dataset(client.dataset(DS_ID))
+        ds_arg.access_entries = entries
         ds_arg.description = DESCRIPTION
         ds_arg.friendly_name = FRIENDLY_NAME
         ds_arg.default_table_expiration_ms = 3600
@@ -434,7 +435,7 @@ def test_update_dataset_w_invalid_field(self):
         creds = _make_credentials()
         client = self._make_one(project=PROJECT, credentials=creds)
         with self.assertRaises(ValueError):
-            client.update_dataset(Dataset(DS_ID), ["foo"])
+            client.update_dataset(Dataset(client.dataset(DS_ID)), ["foo"])

     def test_update_dataset(self):
         from google.cloud.bigquery.dataset import Dataset
@@ -460,7 +461,7 @@ def test_update_dataset(self):
         creds = _make_credentials()
         client = self._make_one(project=PROJECT, credentials=creds)
         conn = client._connection = _Connection(RESOURCE, RESOURCE)
-        ds = Dataset(DS_ID, project=PROJECT)
+        ds = Dataset(client.dataset(DS_ID))
         ds.description = DESCRIPTION
         ds.friendly_name = FRIENDLY_NAME
         ds.location = LOCATION
@@ -636,7 +637,8 @@ def test_delete_dataset(self):
         creds = _make_credentials()
         client = self._make_one(project=PROJECT, credentials=creds)
         conn = client._connection = _Connection({}, {})
-        for arg in (client.dataset(DS_ID), Dataset(DS_ID, project=PROJECT)):
+        ds_ref = client.dataset(DS_ID)
+        for arg in (ds_ref, Dataset(ds_ref)):
             client.delete_dataset(arg)
             req = conn._requested[0]
             self.assertEqual(req['method'], 'DELETE')
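Because the constructor no longer accepts `access_entries`, entries are now assigned through the property setter after construction, as `test_create_dataset_w_attrs` does. A local sketch (the entity values are examples):

```python
from google.cloud.bigquery.dataset import AccessEntry, Dataset, DatasetReference

dataset = Dataset(DatasetReference('my-project', 'my_dataset'))
# The setter still validates each entry, replacing the old ctor argument.
dataset.access_entries = [
    AccessEntry('OWNER', 'userByEmail', 'phred@example.com'),
]
```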
diff --git a/bigquery/tests/unit/test_dataset.py b/bigquery/tests/unit/test_dataset.py
index 9d13ebb9bc4b..ced77990a65d 100644
--- a/bigquery/tests/unit/test_dataset.py
+++ b/bigquery/tests/unit/test_dataset.py
@@ -101,6 +101,12 @@ def test_ctor_defaults(self):
         self.assertEqual(dataset_ref.project, 'some-project-1')
         self.assertEqual(dataset_ref.dataset_id, 'dataset_1')

+    def test_ctor_bad_args(self):
+        with self.assertRaises(ValueError):
+            self._make_one(1, 'd')
+        with self.assertRaises(ValueError):
+            self._make_one('p', 2)
+
     def test_table(self):
         dataset_ref = self._make_one('some-project-1', 'dataset_1')
         table_ref = dataset_ref.table('table_1')
@@ -110,8 +116,11 @@ def test_table(self):


 class TestDataset(unittest.TestCase):
+    from google.cloud.bigquery.dataset import DatasetReference
+
     PROJECT = 'project'
     DS_ID = 'dataset-id'
+    DS_REF = DatasetReference(PROJECT, DS_ID)

     @staticmethod
     def _get_target_class():
@@ -210,7 +219,7 @@ def _verify_resource_properties(self, dataset, resource):
             self.assertEqual(dataset.access_entries, [])

     def test_ctor_defaults(self):
-        dataset = self._make_one(self.DS_ID, project=self.PROJECT)
+        dataset = self._make_one(self.DS_REF)
         self.assertEqual(dataset.dataset_id, self.DS_ID)
         self.assertEqual(dataset.project, self.PROJECT)
         self.assertEqual(
@@ -230,15 +239,14 @@ def test_ctor_defaults(self):
         self.assertIsNone(dataset.location)

     def test_ctor_explicit(self):
-        from google.cloud.bigquery.dataset import AccessEntry
+        from google.cloud.bigquery.dataset import DatasetReference, AccessEntry

         phred = AccessEntry('OWNER', 'userByEmail', 'phred@example.com')
         bharney = AccessEntry('OWNER', 'userByEmail', 'bharney@example.com')
         entries = [phred, bharney]
         OTHER_PROJECT = 'foo-bar-123'
-        dataset = self._make_one(self.DS_ID,
-                                 access_entries=entries,
-                                 project=OTHER_PROJECT)
+        dataset = self._make_one(DatasetReference(OTHER_PROJECT, self.DS_ID))
+        dataset.access_entries = entries
         self.assertEqual(dataset.dataset_id, self.DS_ID)
         self.assertEqual(dataset.project, OTHER_PROJECT)
         self.assertEqual(
@@ -258,14 +266,14 @@ def test_ctor_explicit(self):
         self.assertIsNone(dataset.location)

     def test_access_entries_setter_non_list(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         with self.assertRaises(TypeError):
             dataset.access_entries = object()

     def test_access_entries_setter_invalid_field(self):
         from google.cloud.bigquery.dataset import AccessEntry

-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         phred = AccessEntry('OWNER', 'userByEmail', 'phred@example.com')
         with self.assertRaises(ValueError):
             dataset.access_entries = [phred, object()]
@@ -273,59 +281,59 @@ def test_access_entries_setter_invalid_field(self):
     def test_access_entries_setter(self):
         from google.cloud.bigquery.dataset import AccessEntry

-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         phred = AccessEntry('OWNER', 'userByEmail', 'phred@example.com')
         bharney = AccessEntry('OWNER', 'userByEmail', 'bharney@example.com')
         dataset.access_entries = [phred, bharney]
         self.assertEqual(dataset.access_entries, [phred, bharney])

     def test_default_table_expiration_ms_setter_bad_value(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         with self.assertRaises(ValueError):
             dataset.default_table_expiration_ms = 'bogus'

     def test_default_table_expiration_ms_setter(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         dataset.default_table_expiration_ms = 12345
         self.assertEqual(dataset.default_table_expiration_ms, 12345)

     def test_description_setter_bad_value(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         with self.assertRaises(ValueError):
             dataset.description = 12345

     def test_description_setter(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         dataset.description = 'DESCRIPTION'
         self.assertEqual(dataset.description, 'DESCRIPTION')

     def test_friendly_name_setter_bad_value(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         with self.assertRaises(ValueError):
             dataset.friendly_name = 12345

     def test_friendly_name_setter(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         dataset.friendly_name = 'FRIENDLY'
         self.assertEqual(dataset.friendly_name, 'FRIENDLY')

     def test_location_setter_bad_value(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         with self.assertRaises(ValueError):
             dataset.location = 12345

     def test_location_setter(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         dataset.location = 'LOCATION'
         self.assertEqual(dataset.location, 'LOCATION')

     def test_labels_setter(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         dataset.labels = {'color': 'green'}
         self.assertEqual(dataset.labels, {'color': 'green'})

     def test_labels_setter_bad_value(self):
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         with self.assertRaises(ValueError):
             dataset.labels = None

@@ -359,7 +367,7 @@ def test__parse_access_entries_w_unknown_entity_type(self):
         ACCESS = [
             {'role': 'READER', 'unknown': 'UNKNOWN'},
         ]
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         with self.assertRaises(ValueError):
             dataset._parse_access_entries(ACCESS)

@@ -372,14 +380,14 @@ def test__parse_access_entries_w_extra_keys(self):
                 'userByEmail': USER_EMAIL,
             },
         ]
-        dataset = self._make_one(self.DS_ID)
+        dataset = self._make_one(self.DS_REF)
         with self.assertRaises(ValueError):
             dataset._parse_access_entries(ACCESS)

     def test_table(self):
         from google.cloud.bigquery.table import TableReference

-        dataset = self._make_one(self.DS_ID, project=self.PROJECT)
+        dataset = self._make_one(self.DS_REF)
         table = dataset.table('table_id')
         self.assertIsInstance(table, TableReference)
         self.assertEqual(table.table_id, 'table_id')
diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py
index 470e802d1150..029db44cd534 100644
---
 a/bigquery/tests/unit/test_job.py
+++ b/bigquery/tests/unit/test_job.py
@@ -2139,7 +2139,7 @@ def test_begin_w_bound_client(self):
         client = _Client(project=self.PROJECT, connection=conn)
         job = self._make_one(self.JOB_NAME, self.QUERY, client)

-        job.default_dataset = Dataset(DS_ID, project=self.PROJECT)
+        job.default_dataset = Dataset(DatasetReference(self.PROJECT, DS_ID))

         job.begin()

@@ -2204,7 +2204,7 @@ def test_begin_w_alternate_client(self):
         job = self._make_one(self.JOB_NAME, self.QUERY, client1)

         dataset_ref = DatasetReference(self.PROJECT, DS_ID)
-        dataset = Dataset(DS_ID, project=self.PROJECT)
+        dataset = Dataset(dataset_ref)
         table_ref = dataset_ref.table(TABLE)

         job.allow_large_results = True
diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py
index ee2783744c94..73f23cb1bf6a 100644
--- a/bigquery/tests/unit/test_query.py
+++ b/bigquery/tests/unit/test_query.py
@@ -196,17 +196,18 @@ def test_ctor_w_query_parameters(self):
         self.assertEqual(query.query_parameters, query_parameters)

     def test_from_query_job(self):
-        from google.cloud.bigquery.dataset import Dataset
+        from google.cloud.bigquery.dataset import Dataset, DatasetReference
         from google.cloud.bigquery.job import QueryJob
         from google.cloud.bigquery._helpers import UDFResource

-        DS_NAME = 'DATASET'
+        DS_ID = 'DATASET'
         RESOURCE_URI = 'gs://some-bucket/js/lib.js'
         client = _Client(self.PROJECT)
         job = QueryJob(
             self.JOB_NAME, self.QUERY, client,
             udf_resources=[UDFResource("resourceUri", RESOURCE_URI)])
-        dataset = job.default_dataset = Dataset(DS_NAME)
+        dataset = Dataset(DatasetReference(self.PROJECT, DS_ID))
+        job.default_dataset = dataset
         job.use_query_cache = True
         job.use_legacy_sql = True
         klass = self._get_target_class()
@@ -741,10 +742,10 @@ def __init__(self, project='project', connection=None):
         self.project = project
         self._connection = connection

-    def dataset(self, name):
-        from google.cloud.bigquery.dataset import Dataset
+    def dataset(self, dataset_id):
+        from google.cloud.bigquery.dataset import Dataset, DatasetReference

-        return Dataset(name)
+        return Dataset(DatasetReference(self.project, dataset_id))


 class _Connection(object):
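Taken together, the diff replaces every `Dataset(dataset_id, project=...)` construction with an explicit `DatasetReference`. A before/after sketch of the migration this implies for callers (ids are placeholders):

```python
from google.cloud.bigquery.dataset import Dataset, DatasetReference

# Before this change (no longer supported):
#     dataset = Dataset('my_dataset', project='my-project')

# After: build the reference first, then wrap it.
dataset = Dataset(DatasetReference('my-project', 'my_dataset'))
```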