From c346973f37bb354808747417c5b2d960bbfa433c Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Tue, 4 Apr 2017 14:47:03 -0700
Subject: [PATCH 1/3] Remove cloud config fixture

---
 appengine/standard/background/main_test.py | 8 +-
 appengine/standard/bigquery/main_test.py | 7 +-
 .../standard/storage/api-client/main_test.py | 7 +-
 .../storage/appengine-client/main_test.py | 8 +-
 bigquery/api/async_query_test.py | 11 ++-
 .../api/export_data_to_cloud_storage_test.py | 25 +++---
 bigquery/api/getting_started_test.py | 7 +-
 bigquery/api/installed_app_test.py | 10 ++-
 bigquery/api/list_datasets_projects_test.py | 7 +-
 bigquery/api/load_data_by_post_test.py | 10 ++-
 bigquery/api/load_data_from_csv_test.py | 11 ++-
 bigquery/api/streaming_test.py | 7 +-
 bigquery/api/sync_query_test.py | 11 ++-
 bigquery/cloud-client/async_query_test.py | 2 +-
 .../cloud-client/export_data_to_gcs_test.py | 9 ++-
 .../cloud-client/load_data_from_gcs_test.py | 8 +-
 .../cloud-client/sync_query_params_test.py | 4 +-
 bigquery/cloud-client/sync_query_test.py | 2 +-
 bigquery/dml/insert_sql_test.py | 7 +-
 bigquery/rest/labels_test.py | 12 ++-
 bigtable/hello/main_test.py | 10 +--
 bigtable/hello_happybase/main_test.py | 9 ++-
 .../blog_test.py | 8 +-
 .../wiki_test.py | 8 +-
 compute/api/create_instance_test.py | 10 ++-
 compute/auth/access_token_test.py | 8 +-
 compute/auth/application_default_test.py | 8 +-
 .../generate_wrapped_rsa_key_test.py | 8 +-
 conftest.py | 28 ++-----
 dataproc/dataproc_e2e_test.py | 8 +-
 datastore/cloud-client/snippets_test.py | 8 +-
 datastore/cloud-client/tasks_test.py | 12 ++-
 dns/api/main_test.py | 33 ++++----
 iap/iap_test.py | 2 +-
 kms/api-client/snippets_test.py | 42 +++++-----
 language/cloud-client/snippets_test.py | 28 +++----
 language/ocr_nl/main_test.py | 15 ++--
 logging/api-client/list_logs_test.py | 7 +-
 logging/cloud-client/export_test.py | 11 +--
 monitoring/api/v2/auth_test.py | 7 +-
 .../api/v2/labeled_custom_metric_test.py | 7 +-
 .../api/v2/lightweight_custom_metric_test.py | 7 +-
 .../api/v3/api-client/custom_metric_test.py | 7 +-
 .../api/v3/api-client/list_resources_test.py | 14 ++--
 spanner/cloud-client/quickstart_test.py | 12 ++-
 spanner/cloud-client/snippets_test.py | 79 ++++++++-----------
 speech/grpc/transcribe_async_test.py | 8 +-
 speech/grpc/transcribe_test.py | 8 +-
 storage/api/compose_objects_test.py | 8 +-
 storage/api/crud_object_test.py | 7 +-
 storage/api/customer_supplied_keys_test.py | 7 +-
 storage/api/list_objects_test.py | 8 +-
 storage/cloud-client/acl_test.py | 51 ++++++------
 storage/cloud-client/encryption_test.py | 20 ++---
 storage/cloud-client/snippets_test.py | 47 +++++------
 translate/cloud-client/snippets_test.py | 8 +-
 video/cloud-client/analyze_test.py | 16 ++--
 .../crop_hints/crop_hints_test.py | 4 +-
 vision/cloud-client/detect/detect_test.py | 72 ++++++++---------
 .../document_text/doctext_test.py | 2 +-
 vision/cloud-client/web/web_detect_test.py | 12 ++-
 61 files changed, 478 insertions(+), 369 deletions(-)

diff --git a/appengine/standard/background/main_test.py b/appengine/standard/background/main_test.py
index 139f528adde8..aa3b78caac72 100644
--- a/appengine/standard/background/main_test.py
+++ b/appengine/standard/background/main_test.py
@@ -12,16 +12,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import os + from mock import patch import pytest import webtest import main +PROJECT = os.environ['GCLOUD_PROJECT'] + @pytest.fixture -def app(cloud_config, testbed): - main.PROJECTID = cloud_config.project +def app(testbed): + main.PROJECTID = PROJECT return webtest.TestApp(main.app) diff --git a/appengine/standard/bigquery/main_test.py b/appengine/standard/bigquery/main_test.py index 470eddf13d20..dcad3391bb01 100644 --- a/appengine/standard/bigquery/main_test.py +++ b/appengine/standard/bigquery/main_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re from googleapiclient.http import HttpMock @@ -21,10 +22,12 @@ import main +PROJECT = os.environ['GCLOUD_PROJECT'] + @pytest.fixture -def app(cloud_config, testbed): - main.PROJECTID = cloud_config.project +def app(testbed): + main.PROJECTID = PROJECT return webtest.TestApp(main.app) diff --git a/appengine/standard/storage/api-client/main_test.py b/appengine/standard/storage/api-client/main_test.py index 7bb5c8c595ba..1fc960987f64 100644 --- a/appengine/standard/storage/api-client/main_test.py +++ b/appengine/standard/storage/api-client/main_test.py @@ -12,15 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re import webtest import main +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_get(cloud_config): - main.BUCKET_NAME = cloud_config.project + +def test_get(): + main.BUCKET_NAME = PROJECT app = webtest.TestApp(main.app) response = app.get('/') diff --git a/appengine/standard/storage/appengine-client/main_test.py b/appengine/standard/storage/appengine-client/main_test.py index 57db55cde736..c3a05cecc574 100644 --- a/appengine/standard/storage/appengine-client/main_test.py +++ b/appengine/standard/storage/appengine-client/main_test.py @@ -12,13 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + import webtest import main +PROJECT = os.environ['GCLOUD_PROJECT'] + -def test_get(testbed, cloud_config): - main.BUCKET_NAME = cloud_config.project +def test_get(testbed): + main.BUCKET_NAME = PROJECT app = webtest.TestApp(main.app) response = app.get('/') diff --git a/bigquery/api/async_query_test.py b/bigquery/api/async_query_test.py index f2f6106fd5e5..6709b47c0e20 100644 --- a/bigquery/api/async_query_test.py +++ b/bigquery/api/async_query_test.py @@ -12,17 +12,20 @@ # limitations under the License. 
import json +import os from async_query import main +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_async_query(cloud_config, capsys): + +def test_async_query(capsys): query = ( 'SELECT corpus FROM publicdata:samples.shakespeare ' 'GROUP BY corpus;') main( - project_id=cloud_config.project, + project_id=PROJECT, query_string=query, batch=False, num_retries=5, @@ -35,11 +38,11 @@ def test_async_query(cloud_config, capsys): assert json.loads(value) is not None -def test_async_query_standard_sql(cloud_config, capsys): +def test_async_query_standard_sql(capsys): query = 'SELECT [1, 2, 3] AS arr;' # Only valid in standard SQL main( - project_id=cloud_config.project, + project_id=PROJECT, query_string=query, batch=False, num_retries=5, diff --git a/bigquery/api/export_data_to_cloud_storage_test.py b/bigquery/api/export_data_to_cloud_storage_test.py index 33636fc9e30f..88743d53a085 100644 --- a/bigquery/api/export_data_to_cloud_storage_test.py +++ b/bigquery/api/export_data_to_cloud_storage_test.py @@ -11,21 +11,24 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from gcp.testing.flaky import flaky from export_data_to_cloud_storage import main +PROJECT = os.environ['GCLOUD_PROJECT'] +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] DATASET_ID = 'test_dataset' TABLE_ID = 'test_table' @flaky -def test_export_table_csv(cloud_config): - cloud_storage_output_uri = \ - 'gs://{}/output.csv'.format(cloud_config.storage_bucket) +def test_export_table_csv(): + cloud_storage_output_uri = 'gs://{}/output.csv'.format(BUCKET) main( cloud_storage_output_uri, - cloud_config.project, + PROJECT, DATASET_ID, TABLE_ID, num_retries=5, @@ -34,12 +37,11 @@ def test_export_table_csv(cloud_config): @flaky -def test_export_table_json(cloud_config): - cloud_storage_output_uri = \ - 'gs://{}/output.json'.format(cloud_config.storage_bucket) +def test_export_table_json(): + cloud_storage_output_uri = 'gs://{}/output.json'.format(BUCKET) main( cloud_storage_output_uri, - cloud_config.project, + PROJECT, DATASET_ID, TABLE_ID, num_retries=5, @@ -48,12 +50,11 @@ def test_export_table_json(cloud_config): @flaky -def test_export_table_avro(cloud_config): - cloud_storage_output_uri = \ - 'gs://{}/output.avro'.format(cloud_config.storage_bucket) +def test_export_table_avro(): + cloud_storage_output_uri = 'gs://{}/output.avro'.format(BUCKET) main( cloud_storage_output_uri, - cloud_config.project, + PROJECT, DATASET_ID, TABLE_ID, num_retries=5, diff --git a/bigquery/api/getting_started_test.py b/bigquery/api/getting_started_test.py index 8f0866f46d10..a2cfbf2d706e 100644 --- a/bigquery/api/getting_started_test.py +++ b/bigquery/api/getting_started_test.py @@ -11,13 +11,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re from getting_started import main +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_main(cloud_config, capsys): - main(cloud_config.project) + +def test_main(capsys): + main(PROJECT) out, _ = capsys.readouterr() diff --git a/bigquery/api/installed_app_test.py b/bigquery/api/installed_app_test.py index d52c1e3e883b..313f46569769 100644 --- a/bigquery/api/installed_app_test.py +++ b/bigquery/api/installed_app_test.py @@ -11,20 +11,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os import re from oauth2client.client import GoogleCredentials import installed_app +PROJECT = os.environ['GCLOUD_PROJECT'] +CLIENT_SECRETS = os.environ['CLIENT_SECRETS'] + class Namespace(object): def __init__(self, **kwargs): self.__dict__.update(kwargs) -def test_main(cloud_config, monkeypatch, capsys): - installed_app.CLIENT_SECRETS = cloud_config.client_secrets +def test_main(monkeypatch, capsys): + installed_app.CLIENT_SECRETS = CLIENT_SECRETS # Replace the user credentials flow with Application Default Credentials. # Unfortunately, there's no easy way to fully test the user flow. @@ -34,7 +38,7 @@ def mock_run_flow(flow, storage, args): monkeypatch.setattr(installed_app.tools, 'run_flow', mock_run_flow) args = Namespace( - project_id=cloud_config.project, + project_id=PROJECT, logging_level='INFO', noauth_local_webserver=True) diff --git a/bigquery/api/list_datasets_projects_test.py b/bigquery/api/list_datasets_projects_test.py index e096a0342b25..268245239449 100644 --- a/bigquery/api/list_datasets_projects_test.py +++ b/bigquery/api/list_datasets_projects_test.py @@ -11,13 +11,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re from list_datasets_projects import main +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_main(cloud_config, capsys): - main(cloud_config.project) + +def test_main(capsys): + main(PROJECT) out, _ = capsys.readouterr() diff --git a/bigquery/api/load_data_by_post_test.py b/bigquery/api/load_data_by_post_test.py index 544adb94aded..b2d2e7706483 100644 --- a/bigquery/api/load_data_by_post_test.py +++ b/bigquery/api/load_data_by_post_test.py @@ -11,25 +11,27 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re from gcp.testing.flaky import flaky from load_data_by_post import load_data +PROJECT = os.environ['GCLOUD_PROJECT'] DATASET_ID = 'ephemeral_test_dataset' TABLE_ID = 'load_data_by_post' @flaky -def test_load_csv_data(cloud_config, resource, capsys): +def test_load_csv_data(resource, capsys): schema_path = resource('schema.json') data_path = resource('data.csv') load_data( schema_path, data_path, - cloud_config.project, + PROJECT, DATASET_ID, TABLE_ID ) @@ -41,14 +43,14 @@ def test_load_csv_data(cloud_config, resource, capsys): @flaky -def test_load_json_data(cloud_config, resource, capsys): +def test_load_json_data(resource, capsys): schema_path = resource('schema.json') data_path = resource('data.json') load_data( schema_path, data_path, - cloud_config.project, + PROJECT, DATASET_ID, TABLE_ID ) diff --git a/bigquery/api/load_data_from_csv_test.py b/bigquery/api/load_data_from_csv_test.py index 988af935ac3d..8154b5d44284 100644 --- a/bigquery/api/load_data_from_csv_test.py +++ b/bigquery/api/load_data_from_csv_test.py @@ -11,22 +11,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + from gcp.testing.flaky import flaky from load_data_from_csv import main +PROJECT = os.environ['GCLOUD_PROJECT'] +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] DATASET_ID = 'test_dataset' TABLE_ID = 'test_import_table' @flaky -def test_load_table(cloud_config, resource): - cloud_storage_input_uri = 'gs://{}/data.csv'.format( - cloud_config.storage_bucket) +def test_load_table(resource): + cloud_storage_input_uri = 'gs://{}/data.csv'.format(BUCKET) schema_file = resource('schema.json') main( - cloud_config.project, + PROJECT, DATASET_ID, TABLE_ID, schema_file=schema_file, diff --git a/bigquery/api/streaming_test.py b/bigquery/api/streaming_test.py index 66347da7cc01..97ddbc19dd42 100644 --- a/bigquery/api/streaming_test.py +++ b/bigquery/api/streaming_test.py @@ -12,22 +12,23 @@ # limitations under the License. import json +import os import streaming - +PROJECT = os.environ['GCLOUD_PROJECT'] DATASET_ID = 'test_dataset' TABLE_ID = 'test_table' -def test_stream_row_to_bigquery(cloud_config, resource, capsys): +def test_stream_row_to_bigquery(resource, capsys): with open(resource('streamrows.json'), 'r') as rows_file: rows = json.load(rows_file) streaming.get_rows = lambda: rows streaming.main( - cloud_config.project, + PROJECT, DATASET_ID, TABLE_ID, num_retries=5) diff --git a/bigquery/api/sync_query_test.py b/bigquery/api/sync_query_test.py index 98189793795a..d460918e7a81 100644 --- a/bigquery/api/sync_query_test.py +++ b/bigquery/api/sync_query_test.py @@ -12,17 +12,20 @@ # limitations under the License. import json +import os from sync_query import main +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_sync_query(cloud_config, capsys): + +def test_sync_query(capsys): query = ( 'SELECT corpus FROM publicdata:samples.shakespeare ' 'GROUP BY corpus;') main( - project_id=cloud_config.project, + project_id=PROJECT, query=query, timeout=30, num_retries=5, @@ -34,11 +37,11 @@ def test_sync_query(cloud_config, capsys): assert json.loads(result) is not None -def test_sync_query_standard_sql(cloud_config, capsys): +def test_sync_query_standard_sql(capsys): query = 'SELECT [1, 2, 3] AS arr;' # Only valid in standard SQL main( - project_id=cloud_config.project, + project_id=PROJECT, query=query, timeout=30, num_retries=5, diff --git a/bigquery/cloud-client/async_query_test.py b/bigquery/cloud-client/async_query_test.py index 810c538a6da7..85ce3fce963e 100644 --- a/bigquery/cloud-client/async_query_test.py +++ b/bigquery/cloud-client/async_query_test.py @@ -15,7 +15,7 @@ from async_query import async_query -def test_async_query(cloud_config, capsys): +def test_async_query(capsys): query = ( 'SELECT corpus FROM `publicdata.samples.shakespeare` ' 'GROUP BY corpus;') diff --git a/bigquery/cloud-client/export_data_to_gcs_test.py b/bigquery/cloud-client/export_data_to_gcs_test.py index acbbe50e55e0..a41cfd226fc7 100644 --- a/bigquery/cloud-client/export_data_to_gcs_test.py +++ b/bigquery/cloud-client/export_data_to_gcs_test.py @@ -11,19 +11,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import export_data_to_gcs +import os +import export_data_to_gcs +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] DATASET_ID = 'test_dataset' TABLE_ID = 'test_table' -def test_export_data_to_gcs(cloud_config, capsys): +def test_export_data_to_gcs(capsys): export_data_to_gcs.export_data_to_gcs( DATASET_ID, TABLE_ID, - 'gs://{}/test-export-data-to-gcs.csv'.format( - cloud_config.storage_bucket)) + 'gs://{}/test-export-data-to-gcs.csv'.format(BUCKET)) out, _ = capsys.readouterr() diff --git a/bigquery/cloud-client/load_data_from_gcs_test.py b/bigquery/cloud-client/load_data_from_gcs_test.py index 2d1c66162c0b..dbd39fc5ceca 100644 --- a/bigquery/cloud-client/load_data_from_gcs_test.py +++ b/bigquery/cloud-client/load_data_from_gcs_test.py @@ -11,15 +11,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + import load_data_from_gcs +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] DATASET_ID = 'test_dataset' TABLE_ID = 'test_import_table' -def test_load_table(cloud_config, capsys): - cloud_storage_input_uri = 'gs://{}/data.csv'.format( - cloud_config.storage_bucket) +def test_load_table(capsys): + cloud_storage_input_uri = 'gs://{}/data.csv'.format(BUCKET) load_data_from_gcs.load_data_from_gcs( DATASET_ID, diff --git a/bigquery/cloud-client/sync_query_params_test.py b/bigquery/cloud-client/sync_query_params_test.py index 270dfc62a580..d87fe8231f4a 100644 --- a/bigquery/cloud-client/sync_query_params_test.py +++ b/bigquery/cloud-client/sync_query_params_test.py @@ -15,7 +15,7 @@ import sync_query_params -def test_sync_query_named_params(cloud_config, capsys): +def test_sync_query_named_params(capsys): sync_query_params.sync_query_named_params( corpus='romeoandjuliet', min_word_count=100) @@ -23,7 +23,7 @@ def test_sync_query_named_params(cloud_config, capsys): assert 'love' in out -def test_sync_query_positional_params(cloud_config, capsys): +def test_sync_query_positional_params(capsys): sync_query_params.sync_query_positional_params( corpus='romeoandjuliet', min_word_count=100) diff --git a/bigquery/cloud-client/sync_query_test.py b/bigquery/cloud-client/sync_query_test.py index 6f6b4f5fa8e8..26c8973e4bfe 100644 --- a/bigquery/cloud-client/sync_query_test.py +++ b/bigquery/cloud-client/sync_query_test.py @@ -15,7 +15,7 @@ from sync_query import sync_query -def test_sync_query(cloud_config, capsys): +def test_sync_query(capsys): query = ( 'SELECT corpus FROM `publicdata.samples.shakespeare` ' 'GROUP BY corpus;') diff --git a/bigquery/dml/insert_sql_test.py b/bigquery/dml/insert_sql_test.py index b2e295906acb..c85aeff17262 100644 --- a/bigquery/dml/insert_sql_test.py +++ b/bigquery/dml/insert_sql_test.py @@ -12,18 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import os.path from insert_sql import insert_sql +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_insert_sql(cloud_config, capsys): + +def test_insert_sql(capsys): sql_path = os.path.join( os.path.dirname(__file__), 'resources', 'insert_sql_test.sql') - insert_sql(cloud_config.project, 'test_dataset', sql_path) + insert_sql(PROJECT, 'test_dataset', sql_path) out, _ = capsys.readouterr() diff --git a/bigquery/rest/labels_test.py b/bigquery/rest/labels_test.py index 360d4a61e612..ca40587bcf82 100644 --- a/bigquery/rest/labels_test.py +++ b/bigquery/rest/labels_test.py @@ -11,15 +11,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + from labels import label_dataset, label_table +PROJECT = os.environ['GCLOUD_PROJECT'] + -def test_label_dataset(cloud_config, capsys): +def test_label_dataset(capsys): label_dataset( 'test_dataset', 'environment', 'test', - project_id=cloud_config.project) + project_id=PROJECT) out, _ = capsys.readouterr() result = out.split('\n')[0] @@ -27,13 +31,13 @@ def test_label_dataset(cloud_config, capsys): assert 'Updated label "environment" with value "test"' in result -def test_label_table(cloud_config, capsys): +def test_label_table(capsys): label_table( 'test_dataset', 'test_table', 'data-owner', 'my-team', - project_id=cloud_config.project) + project_id=PROJECT) out, _ = capsys.readouterr() result = out.split('\n')[0] diff --git a/bigtable/hello/main_test.py b/bigtable/hello/main_test.py index c0968dd67992..98c4dd48ff3b 100644 --- a/bigtable/hello/main_test.py +++ b/bigtable/hello/main_test.py @@ -12,22 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import random from main import main +PROJECT = os.environ['GCLOUD_PROJECT'] +BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] TABLE_NAME_FORMAT = 'hello-bigtable-system-tests-{}' TABLE_NAME_RANGE = 10000 -def test_main(cloud_config, capsys): +def test_main(capsys): table_name = TABLE_NAME_FORMAT.format( random.randrange(TABLE_NAME_RANGE)) - main( - cloud_config.project, - cloud_config.bigtable_instance, - table_name) + main(PROJECT, BIGTABLE_INSTANCE, table_name) out, _ = capsys.readouterr() assert 'Creating the {} table.'.format(table_name) in out diff --git a/bigtable/hello_happybase/main_test.py b/bigtable/hello_happybase/main_test.py index 6e58dac4c3e5..2d4f4bdbf798 100644 --- a/bigtable/hello_happybase/main_test.py +++ b/bigtable/hello_happybase/main_test.py @@ -12,20 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import random from main import main +PROJECT = os.environ['GCLOUD_PROJECT'] +BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] TABLE_NAME_FORMAT = 'hello_happybase-system-tests-{}' TABLE_NAME_RANGE = 10000 -def test_main(cloud_config, capsys): +def test_main(capsys): table_name = TABLE_NAME_FORMAT.format( random.randrange(TABLE_NAME_RANGE)) main( - cloud_config.project, - cloud_config.bigtable_instance, + PROJECT, + BIGTABLE_INSTANCE, table_name) out, _ = capsys.readouterr() diff --git a/blog/introduction_to_data_models_in_cloud_datastore/blog_test.py b/blog/introduction_to_data_models_in_cloud_datastore/blog_test.py index 04eb0c4a8fed..f0c7960673cf 100644 --- a/blog/introduction_to_data_models_in_cloud_datastore/blog_test.py +++ b/blog/introduction_to_data_models_in_cloud_datastore/blog_test.py @@ -11,11 +11,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + from gcp.testing.flaky import flaky from blog import main +PROJECT = os.environ['GCLOUD_PROJECT'] + @flaky -def test_main(cloud_config): - main(cloud_config.project) +def test_main(): + main(PROJECT) diff --git a/blog/introduction_to_data_models_in_cloud_datastore/wiki_test.py b/blog/introduction_to_data_models_in_cloud_datastore/wiki_test.py index 1208fc038ce6..4b0fed1f759e 100644 --- a/blog/introduction_to_data_models_in_cloud_datastore/wiki_test.py +++ b/blog/introduction_to_data_models_in_cloud_datastore/wiki_test.py @@ -11,11 +11,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from gcp.testing.flaky import flaky from wiki import main +PROJECT = os.environ['GCLOUD_PROJECT'] + @flaky -def test_main(cloud_config): - main(cloud_config.project) +def test_main(): + main(PROJECT) diff --git a/compute/api/create_instance_test.py b/compute/api/create_instance_test.py index b458b46387f7..dcff01224888 100644 --- a/compute/api/create_instance_test.py +++ b/compute/api/create_instance_test.py @@ -11,18 +11,22 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re from gcp.testing.flaky import flaky from create_instance import main +PROJECT = os.environ['GCLOUD_PROJECT'] +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] + @flaky -def test_main(cloud_config, capsys): +def test_main(capsys): main( - cloud_config.project, - cloud_config.storage_bucket, + PROJECT, + BUCKET, 'us-central1-f', 'test-instance', wait=False) diff --git a/compute/auth/access_token_test.py b/compute/auth/access_token_test.py index e266b1714011..4d65b020da8e 100644 --- a/compute/auth/access_token_test.py +++ b/compute/auth/access_token_test.py @@ -11,13 +11,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + import mock import access_token +PROJECT = os.environ['GCLOUD_PROJECT'] + @mock.patch('access_token.requests') -def test_main(requests_mock, cloud_config): +def test_main(requests_mock): metadata_response = mock.Mock() metadata_response.status_code = 200 metadata_response.json.return_value = { @@ -30,6 +34,6 @@ def test_main(requests_mock, cloud_config): requests_mock.get.side_effect = [ metadata_response, bucket_response] - access_token.main(cloud_config.project) + access_token.main(PROJECT) assert requests_mock.get.call_count == 2 diff --git a/compute/auth/application_default_test.py b/compute/auth/application_default_test.py index 0336c329148b..6d23b71639c1 100644 --- a/compute/auth/application_default_test.py +++ b/compute/auth/application_default_test.py @@ -11,8 +11,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + from application_default import main +PROJECT = os.environ['GCLOUD_PROJECT'] + -def test_main(cloud_config): - main(cloud_config.project) +def test_main(): + main(PROJECT) diff --git a/compute/encryption/generate_wrapped_rsa_key_test.py b/compute/encryption/generate_wrapped_rsa_key_test.py index bbf74791e9b8..ea5ed6ea524b 100644 --- a/compute/encryption/generate_wrapped_rsa_key_test.py +++ b/compute/encryption/generate_wrapped_rsa_key_test.py @@ -17,12 +17,14 @@ import generate_wrapped_rsa_key +PROJECT = os.environ['GCLOUD_PROJECT'] + def test_main(): generate_wrapped_rsa_key.main(None) -def test_create_disk(cloud_config): +def test_create_disk(): compute = googleapiclient.discovery.build('compute', 'beta') # Generate the key. @@ -33,7 +35,7 @@ def test_create_disk(cloud_config): # Create the disk, if the encryption key is invalid, this will raise. compute.disks().insert( - project=cloud_config.project, + project=PROJECT, zone='us-central1-f', body={ 'name': 'new-encrypted-disk', @@ -44,6 +46,6 @@ def test_create_disk(cloud_config): # Delete the disk. compute.disks().delete( - project=cloud_config.project, + project=PROJECT, zone='us-central1-f', disk='new-encrypted-disk').execute() diff --git a/conftest.py b/conftest.py index a4f3c1008f8a..de49705889d6 100644 --- a/conftest.py +++ b/conftest.py @@ -18,22 +18,8 @@ import pytest import requests - -class Namespace(object): - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - - -@pytest.fixture(scope='session') -def cloud_config(): - """Provides a configuration object as a proxy to environment variables.""" - return Namespace( - project=os.environ.get('GCLOUD_PROJECT'), - storage_bucket=os.environ.get('CLOUD_STORAGE_BUCKET'), - client_secrets=os.environ.get('GOOGLE_CLIENT_SECRETS'), - bigtable_instance=os.environ.get('BIGTABLE_CLUSTER'), - spanner_instance=os.environ.get('SPANNER_INSTANCE'), - api_key=os.environ.get('API_KEY')) +PROJECT = os.environ['GCLOUD_PROJECT'] +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] def get_resource_path(resource, local_path): @@ -65,26 +51,26 @@ def fetch_gcs_resource(resource, tmpdir, _chunk_size=1024): @pytest.fixture(scope='module') -def remote_resource(cloud_config): +def remote_resource(): """Provides a function that downloads the given resource from Cloud Storage, returning the path to the downloaded resource.""" remote_uri = 'http://storage.googleapis.com/{}/'.format( - cloud_config.storage_bucket) + BUCKET) return lambda path, tmpdir: fetch_gcs_resource( remote_uri + path.strip('/'), tmpdir) @pytest.fixture -def api_client_inject_project_id(cloud_config): +def api_client_inject_project_id(): """Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with - the project ID from cloud_config.""" + the project ID.""" import googleapiclient.http old_execute = googleapiclient.http.HttpRequest.execute def new_execute(self, http=None, num_retries=0): - self.uri = self.uri.replace('YOUR_PROJECT_ID', cloud_config.project) + self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT) return old_execute(self, http=http, num_retries=num_retries) with mock.patch( diff --git a/dataproc/dataproc_e2e_test.py b/dataproc/dataproc_e2e_test.py index c69726ce043b..4e86fdcb5391 100644 --- a/dataproc/dataproc_e2e_test.py +++ b/dataproc/dataproc_e2e_test.py @@ -16,16 +16,20 @@ submits a job to Dataproc that runs the pyspark file, then downloads the output logs from Cloud Storage and verifies the expected output.""" +import os + from gcp.testing.flaky import flaky import create_cluster_and_submit_job 
+PROJECT = os.environ['GCLOUD_PROJECT'] +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] CLUSTER_NAME = 'testcluster2' ZONE = 'us-central1-b' @flaky -def test_e2e(cloud_config): +def test_e2e(): output = create_cluster_and_submit_job.main( - cloud_config.project, ZONE, CLUSTER_NAME, cloud_config.storage_bucket) + PROJECT, ZONE, CLUSTER_NAME, BUCKET) assert b"['Hello,', 'dog', 'elephant', 'panther', 'world!']" in output diff --git a/datastore/cloud-client/snippets_test.py b/datastore/cloud-client/snippets_test.py index 5730053ce95d..257e1da2a22b 100644 --- a/datastore/cloud-client/snippets_test.py +++ b/datastore/cloud-client/snippets_test.py @@ -11,6 +11,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from gcp.testing import eventually_consistent from gcp.testing.flaky import flaky from google.cloud import datastore @@ -18,6 +20,8 @@ import snippets +PROJECT = os.environ['GCLOUD_PROJECT'] + class CleanupClient(datastore.Client): def __init__(self, *args, **kwargs): @@ -33,8 +37,8 @@ def cleanup(self): @pytest.yield_fixture -def client(cloud_config): - client = CleanupClient(cloud_config.project) +def client(): + client = CleanupClient(PROJECT) yield client client.cleanup() diff --git a/datastore/cloud-client/tasks_test.py b/datastore/cloud-client/tasks_test.py index 2f4cbf02e594..72f7fceff170 100644 --- a/datastore/cloud-client/tasks_test.py +++ b/datastore/cloud-client/tasks_test.py @@ -11,6 +11,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from gcp.testing import eventually_consistent from gcp.testing.flaky import flaky from google.cloud import datastore @@ -18,10 +20,12 @@ import tasks +PROJECT = os.environ['GCLOUD_PROJECT'] + @pytest.yield_fixture -def client(cloud_config): - client = datastore.Client(cloud_config.project) +def client(): + client = datastore.Client(PROJECT) yield client @@ -32,8 +36,8 @@ def client(cloud_config): @flaky -def test_create_client(cloud_config): - tasks.create_client(cloud_config.project) +def test_create_client(): + tasks.create_client(PROJECT) @flaky diff --git a/dns/api/main_test.py b/dns/api/main_test.py index be6dfef78b5d..bf1677f95fcb 100644 --- a/dns/api/main_test.py +++ b/dns/api/main_test.py @@ -11,20 +11,23 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from gcp.testing.flaky import flaky from google.cloud import dns import pytest import main +PROJECT = os.environ['GCLOUD_PROJECT'] TEST_ZONE_NAME = 'test-zone' TEST_ZONE_DNS_NAME = 'theadora.is.' 
TEST_ZONE_DESCRIPTION = 'Test zone' @pytest.yield_fixture -def client(cloud_config): - client = dns.Client(cloud_config.project) +def client(): + client = dns.Client(PROJECT) yield client @@ -34,7 +37,7 @@ def client(cloud_config): @pytest.yield_fixture -def zone(client, cloud_config): +def zone(client): zone = client.zone(TEST_ZONE_NAME, TEST_ZONE_DNS_NAME) zone.description = TEST_ZONE_DESCRIPTION zone.create() @@ -46,9 +49,9 @@ def zone(client, cloud_config): @flaky -def test_create_zone(client, cloud_config): +def test_create_zone(client): zone = main.create_zone( - cloud_config.project, + PROJECT, TEST_ZONE_NAME, TEST_ZONE_DNS_NAME, TEST_ZONE_DESCRIPTION) @@ -59,8 +62,8 @@ def test_create_zone(client, cloud_config): @flaky -def test_get_zone(client, cloud_config, zone): - zone = main.get_zone(cloud_config.project, TEST_ZONE_NAME) +def test_get_zone(client, zone): + zone = main.get_zone(PROJECT, TEST_ZONE_NAME) assert zone.name == TEST_ZONE_NAME assert zone.dns_name == TEST_ZONE_DNS_NAME @@ -68,26 +71,26 @@ def test_get_zone(client, cloud_config, zone): @flaky -def test_list_zones(client, cloud_config, zone): - zones = main.list_zones(cloud_config.project) +def test_list_zones(client, zone): + zones = main.list_zones(PROJECT) assert TEST_ZONE_NAME in zones @flaky -def test_delete_zone(client, cloud_config, zone): - main.delete_zone(cloud_config.project, TEST_ZONE_NAME) +def test_delete_zone(client, zone): + main.delete_zone(PROJECT, TEST_ZONE_NAME) @flaky -def test_list_resource_records(client, cloud_config, zone): - records = main.list_resource_records(cloud_config.project, TEST_ZONE_NAME) +def test_list_resource_records(client, zone): + records = main.list_resource_records(PROJECT, TEST_ZONE_NAME) assert records @flaky -def test_list_changes(client, cloud_config, zone): - changes = main.list_changes(cloud_config.project, TEST_ZONE_NAME) +def test_list_changes(client, zone): + changes = main.list_changes(PROJECT, TEST_ZONE_NAME) assert changes diff --git a/iap/iap_test.py b/iap/iap_test.py index 484dfb582a06..3a51d770a33d 100644 --- a/iap/iap_test.py +++ b/iap/iap_test.py @@ -31,7 +31,7 @@ @flaky -def test_main(cloud_config, capsys): +def test_main(capsys): # JWTs are obtained by IAP-protected applications whenever an # end-user makes a request. We've set up an app that echoes back # the JWT in order to expose it to this test. Thus, this test diff --git a/kms/api-client/snippets_test.py b/kms/api-client/snippets_test.py index 873ce90d8f01..b3c4800d3360 100644 --- a/kms/api-client/snippets_test.py +++ b/kms/api-client/snippets_test.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and +import os import random import string @@ -20,6 +21,7 @@ import snippets +PROJECT = os.environ['GCLOUD_PROJECT'] # Your Google Cloud Platform Key Location LOCATION = 'global' @@ -42,25 +44,25 @@ ROLE = 'roles/owner' -def test_create_keyring(capsys, cloud_config): - snippets.create_keyring(cloud_config.project, LOCATION, KEYRING) +def test_create_keyring(capsys): + snippets.create_keyring(PROJECT, LOCATION, KEYRING) out, _ = capsys.readouterr() expected = 'Created KeyRing projects/{}/locations/{}/keyRings/{}.'.format( - cloud_config.project, LOCATION, KEYRING) + PROJECT, LOCATION, KEYRING) assert expected in out -def test_create_cryptokey(capsys, cloud_config): +def test_create_cryptokey(capsys): snippets.create_cryptokey( - cloud_config.project, LOCATION, KEYRING, CRYPTOKEY) + PROJECT, LOCATION, KEYRING, CRYPTOKEY) out, _ = capsys.readouterr() expected = ( 'Created CryptoKey projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}.' - .format(cloud_config.project, LOCATION, KEYRING, CRYPTOKEY)) + .format(PROJECT, LOCATION, KEYRING, CRYPTOKEY)) assert expected in out -def test_encrypt_decrypt(capsys, cloud_config, tmpdir): +def test_encrypt_decrypt(capsys, tmpdir): # Write to a plaintext file. tmpdir.join('in.txt').write('SampleText') @@ -71,10 +73,10 @@ def test_encrypt_decrypt(capsys, cloud_config, tmpdir): # Encrypt text and then decrypt it. snippets.encrypt( - cloud_config.project, LOCATION, KEYRING, CRYPTOKEY, + PROJECT, LOCATION, KEYRING, CRYPTOKEY, str(plaintext_file), str(encrypted_file)) snippets.decrypt( - cloud_config.project, LOCATION, KEYRING, CRYPTOKEY, + PROJECT, LOCATION, KEYRING, CRYPTOKEY, str(encrypted_file), str(decrypted_file)) # Make sure the decrypted text matches the original text. @@ -87,35 +89,35 @@ def test_encrypt_decrypt(capsys, cloud_config, tmpdir): assert 'Saved decrypted text to {}.'.format(str(decrypted_file)) in out -def test_disable_cryptokey_version(capsys, cloud_config): +def test_disable_cryptokey_version(capsys): snippets.disable_cryptokey_version( - cloud_config.project, LOCATION, KEYRING, CRYPTOKEY, VERSION) + PROJECT, LOCATION, KEYRING, CRYPTOKEY, VERSION) out, _ = capsys.readouterr() expected = ( 'CryptoKeyVersion projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}/' 'cryptoKeyVersions/{}\'s state has been set to {}.' .format( - cloud_config.project, LOCATION, KEYRING, CRYPTOKEY, VERSION, + PROJECT, LOCATION, KEYRING, CRYPTOKEY, VERSION, 'DISABLED')) assert expected in out -def test_destroy_cryptokey_version(capsys, cloud_config): +def test_destroy_cryptokey_version(capsys): snippets.destroy_cryptokey_version( - cloud_config.project, LOCATION, KEYRING, CRYPTOKEY, VERSION) + PROJECT, LOCATION, KEYRING, CRYPTOKEY, VERSION) out, _ = capsys.readouterr() expected = ( 'CryptoKeyVersion projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}/' 'cryptoKeyVersions/{}\'s state has been set to {}.' 
.format( - cloud_config.project, LOCATION, KEYRING, CRYPTOKEY, VERSION, + PROJECT, LOCATION, KEYRING, CRYPTOKEY, VERSION, 'DESTROY_SCHEDULED')) assert expected in out -def test_add_member_to_cryptokey_policy(capsys, cloud_config): +def test_add_member_to_cryptokey_policy(capsys): snippets.add_member_to_cryptokey_policy( - cloud_config.project, LOCATION, KEYRING, CRYPTOKEY, MEMBER, ROLE) + PROJECT, LOCATION, KEYRING, CRYPTOKEY, MEMBER, ROLE) out, _ = capsys.readouterr() expected = ( 'Member {} added with role {} to policy for CryptoKey {} in KeyRing {}' @@ -124,7 +126,7 @@ def test_add_member_to_cryptokey_policy(capsys, cloud_config): kms_client = googleapiclient.discovery.build('cloudkms', 'v1') parent = 'projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}'.format( - cloud_config.project, LOCATION, KEYRING, CRYPTOKEY) + PROJECT, LOCATION, KEYRING, CRYPTOKEY) cryptokeys = kms_client.projects().locations().keyRings().cryptoKeys() policy_request = cryptokeys.getIamPolicy(resource=parent) policy_response = policy_request.execute() @@ -139,8 +141,8 @@ def test_add_member_to_cryptokey_policy(capsys, cloud_config): assert found_member_role_pair -def test_get_keyring_policy(capsys, cloud_config): - project_id = cloud_config.project +def test_get_keyring_policy(capsys): + project_id = PROJECT snippets.get_keyring_policy(project_id, LOCATION, KEYRING) out, _ = capsys.readouterr() expected_roles_exist = ( diff --git a/language/cloud-client/snippets_test.py b/language/cloud-client/snippets_test.py index 47050e44e23c..080d5dd5d5bb 100644 --- a/language/cloud-client/snippets_test.py +++ b/language/cloud-client/snippets_test.py @@ -12,49 +12,47 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import snippets +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] +TEST_FILE_URL = 'gs://{}/text.txt'.format(BUCKET) -def test_sentiment_text(cloud_config, capsys): + +def test_sentiment_text(capsys): snippets.sentiment_text('President Obama is speaking at the White House.') out, _ = capsys.readouterr() assert 'Score: 0.2' in out -def test_sentiment_file(cloud_config, capsys): - cloud_storage_input_uri = 'gs://{}/text.txt'.format( - cloud_config.storage_bucket) - snippets.sentiment_file(cloud_storage_input_uri) +def test_sentiment_file(capsys): + snippets.sentiment_file(TEST_FILE_URL) out, _ = capsys.readouterr() assert 'Score: 0.2' in out -def test_entities_text(cloud_config, capsys): +def test_entities_text(capsys): snippets.entities_text('President Obama is speaking at the White House.') out, _ = capsys.readouterr() assert 'name' in out assert ': Obama' in out -def test_entities_file(cloud_config, capsys): - cloud_storage_input_uri = 'gs://{}/text.txt'.format( - cloud_config.storage_bucket) - snippets.entities_file(cloud_storage_input_uri) +def test_entities_file(capsys): + snippets.entities_file(TEST_FILE_URL) out, _ = capsys.readouterr() assert 'name' in out assert ': Obama' in out -def test_syntax_text(cloud_config, capsys): +def test_syntax_text(capsys): snippets.syntax_text('President Obama is speaking at the White House.') out, _ = capsys.readouterr() assert 'NOUN: President' in out -def test_syntax_file(cloud_config, capsys): - cloud_storage_input_uri = 'gs://{}/text.txt'.format( - cloud_config.storage_bucket) - snippets.syntax_file(cloud_storage_input_uri) +def test_syntax_file(capsys): + snippets.syntax_file(TEST_FILE_URL) out, _ = capsys.readouterr() assert 'NOUN: President' in out diff --git a/language/ocr_nl/main_test.py 
b/language/ocr_nl/main_test.py index e5a9962e1e25..832483ca5317 100755 --- a/language/ocr_nl/main_test.py +++ b/language/ocr_nl/main_test.py @@ -13,15 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Tests for main.""" - +import os import re import zipfile import main - -_TEST_IMAGE_URI = 'gs://{}/language/image8.png' +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] +TEST_IMAGE_URI = 'gs://{}/language/image8.png'.format(BUCKET) def test_batch_empty(): @@ -36,10 +35,10 @@ def test_batch_single(): assert batched == ((1,),) -def test_single_image_returns_text(cloud_config): +def test_single_image_returns_text(): vision_api_client = main.VisionApi() - image_path = _TEST_IMAGE_URI.format(cloud_config.storage_bucket) + image_path = TEST_IMAGE_URI texts = vision_api_client.detect_text([image_path]) assert image_path in texts @@ -66,9 +65,9 @@ def test_text_returns_entities(): assert wurl == 'http://en.wikipedia.org/wiki/Sherlock_Holmes' -def test_entities_list(cloud_config): +def test_entities_list(): vision_api_client = main.VisionApi() - image_path = _TEST_IMAGE_URI.format(cloud_config.storage_bucket) + image_path = TEST_IMAGE_URI texts = vision_api_client.detect_text([image_path]) locale, document = main.extract_description(texts[image_path]) text_analyzer = main.TextAnalyzer() diff --git a/logging/api-client/list_logs_test.py b/logging/api-client/list_logs_test.py index f7f94cf84b12..7b17c06f5b9a 100644 --- a/logging/api-client/list_logs_test.py +++ b/logging/api-client/list_logs_test.py @@ -11,12 +11,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re import list_logs +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_main(cloud_config, capsys): - list_logs.main(cloud_config.project) + +def test_main(capsys): + list_logs.main(PROJECT) out, _ = capsys.readouterr() assert re.search(re.compile(r'.*', re.S), out) diff --git a/logging/cloud-client/export_test.py b/logging/cloud-client/export_test.py index 8f1c299d7e76..99b78f8e43bf 100644 --- a/logging/cloud-client/export_test.py +++ b/logging/cloud-client/export_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import random import string @@ -21,6 +22,7 @@ import export +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] TEST_SINK_NAME_TMPL = 'example_sink_{}' TEST_SINK_FILTER = 'severity>=CRITICAL' @@ -32,14 +34,13 @@ def _random_id(): @pytest.yield_fixture -def example_sink(cloud_config): +def example_sink(): client = logging.Client() sink = client.sink( TEST_SINK_NAME_TMPL.format(_random_id()), TEST_SINK_FILTER, - 'storage.googleapis.com/{bucket}'.format( - bucket=cloud_config.storage_bucket)) + 'storage.googleapis.com/{bucket}'.format(bucket=BUCKET)) sink.create() @@ -59,13 +60,13 @@ def _(): assert example_sink.name in out -def test_create(cloud_config, capsys): +def test_create(capsys): sink_name = TEST_SINK_NAME_TMPL.format(_random_id()) try: export.create_sink( sink_name, - cloud_config.storage_bucket, + BUCKET, TEST_SINK_FILTER) # Clean-up the temporary sink. finally: diff --git a/monitoring/api/v2/auth_test.py b/monitoring/api/v2/auth_test.py index 9ad0e8c3e753..6349b7c07fcf 100644 --- a/monitoring/api/v2/auth_test.py +++ b/monitoring/api/v2/auth_test.py @@ -11,13 +11,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os import re import auth +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_main(cloud_config, capsys): - auth.list_timeseries(cloud_config.project) + +def test_main(capsys): + auth.list_timeseries(PROJECT) output, _ = capsys.readouterr() assert re.search( diff --git a/monitoring/api/v2/labeled_custom_metric_test.py b/monitoring/api/v2/labeled_custom_metric_test.py index f564eef0fd5d..1ad2009aca40 100644 --- a/monitoring/api/v2/labeled_custom_metric_test.py +++ b/monitoring/api/v2/labeled_custom_metric_test.py @@ -14,13 +14,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re import labeled_custom_metric +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_main(cloud_config, capsys): - labeled_custom_metric.main(cloud_config.project, "yellow", "large", "10") + +def test_main(capsys): + labeled_custom_metric.main(PROJECT, "yellow", "large", "10") output, _ = capsys.readouterr() assert re.search( diff --git a/monitoring/api/v2/lightweight_custom_metric_test.py b/monitoring/api/v2/lightweight_custom_metric_test.py index f8c0598a61b1..d95eb47d926a 100644 --- a/monitoring/api/v2/lightweight_custom_metric_test.py +++ b/monitoring/api/v2/lightweight_custom_metric_test.py @@ -14,13 +14,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re import lightweight_custom_metric +PROJECT = os.environ['GCLOUD_PROJECT'] -def test_main(cloud_config, capsys): - lightweight_custom_metric.main(cloud_config.project) + +def test_main(capsys): + lightweight_custom_metric.main(PROJECT) output, _ = capsys.readouterr() assert re.search( diff --git a/monitoring/api/v3/api-client/custom_metric_test.py b/monitoring/api/v3/api-client/custom_metric_test.py index 824415076a26..76f328a3dbe2 100644 --- a/monitoring/api/v3/api-client/custom_metric_test.py +++ b/monitoring/api/v3/api-client/custom_metric_test.py @@ -20,6 +20,7 @@ for this test, but it could be changed to a different project. """ +import os import random import time @@ -34,6 +35,8 @@ from custom_metric import read_timeseries from custom_metric import write_timeseries_value +PROJECT = os.environ['GCLOUD_PROJECT'] + """ Custom metric domain for all custom metrics""" CUSTOM_METRIC_DOMAIN = "custom.googleapis.com" @@ -50,8 +53,8 @@ def client(): @flaky -def test_custom_metric(cloud_config, client): - PROJECT_RESOURCE = "projects/{}".format(cloud_config.project) +def test_custom_metric(client): + PROJECT_RESOURCE = "projects/{}".format(PROJECT) # Use a constant seed so psuedo random number is known ahead of time random.seed(1) pseudo_random_value = random.randint(0, 10) diff --git a/monitoring/api/v3/api-client/list_resources_test.py b/monitoring/api/v3/api-client/list_resources_test.py index 663e963bc80c..c17f09bcbbcb 100644 --- a/monitoring/api/v3/api-client/list_resources_test.py +++ b/monitoring/api/v3/api-client/list_resources_test.py @@ -20,6 +20,7 @@ for this test, but it could be changed to a different project. 
""" +import os import re from gcp.testing.flaky import flaky @@ -28,6 +29,7 @@ import list_resources +PROJECT = os.environ['GCLOUD_PROJECT'] METRIC = 'compute.googleapis.com/instance/cpu/usage_time' @@ -37,8 +39,8 @@ def client(): @flaky -def test_list_monitored_resources(cloud_config, client, capsys): - PROJECT_RESOURCE = "projects/{}".format(cloud_config.project) +def test_list_monitored_resources(client, capsys): + PROJECT_RESOURCE = "projects/{}".format(PROJECT) list_resources.list_monitored_resource_descriptors( client, PROJECT_RESOURCE) stdout, _ = capsys.readouterr() @@ -48,8 +50,8 @@ def test_list_monitored_resources(cloud_config, client, capsys): @flaky -def test_list_metrics(cloud_config, client, capsys): - PROJECT_RESOURCE = "projects/{}".format(cloud_config.project) +def test_list_metrics(client, capsys): + PROJECT_RESOURCE = "projects/{}".format(PROJECT) list_resources.list_metric_descriptors( client, PROJECT_RESOURCE, METRIC) stdout, _ = capsys.readouterr() @@ -59,8 +61,8 @@ def test_list_metrics(cloud_config, client, capsys): @flaky -def test_list_timeseries(cloud_config, client, capsys): - PROJECT_RESOURCE = "projects/{}".format(cloud_config.project) +def test_list_timeseries(client, capsys): + PROJECT_RESOURCE = "projects/{}".format(PROJECT) list_resources.list_timeseries( client, PROJECT_RESOURCE, METRIC) stdout, _ = capsys.readouterr() diff --git a/spanner/cloud-client/quickstart_test.py b/spanner/cloud-client/quickstart_test.py index dafac78b952c..c2e42001b8d3 100644 --- a/spanner/cloud-client/quickstart_test.py +++ b/spanner/cloud-client/quickstart_test.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from google.cloud import spanner import google.cloud.exceptions import google.cloud.spanner.client @@ -20,13 +22,15 @@ import quickstart +SPANNER_INSTANCE = os.environ['SPANNER_INSTANCE'] + @pytest.fixture -def patch_instance(cloud_config): +def patch_instance(): original_instance = google.cloud.spanner.client.Client.instance def new_instance(self, unused_instance_name): - return original_instance(self, cloud_config.spanner_instance) + return original_instance(self, SPANNER_INSTANCE) instance_patch = mock.patch( 'google.cloud.spanner.client.Client.instance', @@ -38,9 +42,9 @@ def new_instance(self, unused_instance_name): @pytest.fixture -def example_database(cloud_config): +def example_database(): spanner_client = spanner.Client() - instance = spanner_client.instance(cloud_config.spanner_instance) + instance = spanner_client.instance(SPANNER_INSTANCE) database = instance.database('my-database-id') if not database.exists(): diff --git a/spanner/cloud-client/snippets_test.py b/spanner/cloud-client/snippets_test.py index 36915332bc02..3feec3e6e9f3 100644 --- a/spanner/cloud-client/snippets_test.py +++ b/spanner/cloud-client/snippets_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os import random import string @@ -21,11 +22,13 @@ import snippets +SPANNER_INSTANCE = os.environ['SPANNER_INSTANCE'] + @pytest.fixture(scope='module') -def spanner_instance(cloud_config): +def spanner_instance(): spanner_client = spanner.Client() - return spanner_client.instance(cloud_config.spanner_instance) + return spanner_client.instance(SPANNER_INSTANCE) def unique_database_id(): @@ -33,11 +36,10 @@ def unique_database_id(): string.ascii_lowercase + string.digits) for _ in range(5))) -def test_create_database(cloud_config, spanner_instance): +def test_create_database(spanner_instance): database_id = unique_database_id() - print(cloud_config.spanner_instance, database_id) - snippets.create_database( - cloud_config.spanner_instance, database_id) + print(SPANNER_INSTANCE, database_id) + snippets.create_database(SPANNER_INSTANCE, database_id) database = spanner_instance.database(database_id) database.reload() # Will only succeed if the database exists. @@ -45,29 +47,26 @@ def test_create_database(cloud_config, spanner_instance): @pytest.fixture(scope='module') -def temporary_database(cloud_config, spanner_instance): +def temporary_database(spanner_instance): database_id = unique_database_id() - snippets.create_database(cloud_config.spanner_instance, database_id) - snippets.insert_data( - cloud_config.spanner_instance, database_id) + snippets.create_database(SPANNER_INSTANCE, database_id) + snippets.insert_data(SPANNER_INSTANCE, database_id) database = spanner_instance.database(database_id) database.reload() yield database database.drop() -def test_query_data(cloud_config, temporary_database, capsys): - snippets.query_data( - cloud_config.spanner_instance, temporary_database.database_id) +def test_query_data(temporary_database, capsys): + snippets.query_data(SPANNER_INSTANCE, temporary_database.database_id) out, _ = capsys.readouterr() assert 'Total Junk' in out -def test_read_data(cloud_config, temporary_database, capsys): - snippets.read_data( - cloud_config.spanner_instance, temporary_database.database_id) +def test_read_data(temporary_database, capsys): + snippets.read_data(SPANNER_INSTANCE, temporary_database.database_id) out, _ = capsys.readouterr() @@ -75,22 +74,20 @@ def test_read_data(cloud_config, temporary_database, capsys): @pytest.fixture(scope='module') -def temporary_database_with_column(cloud_config, temporary_database): - snippets.add_column( - cloud_config.spanner_instance, temporary_database.database_id) +def temporary_database_with_column(temporary_database): + snippets.add_column(SPANNER_INSTANCE, temporary_database.database_id) yield temporary_database -def test_update_data(cloud_config, temporary_database_with_column): +def test_update_data(temporary_database_with_column): snippets.update_data( - cloud_config.spanner_instance, + SPANNER_INSTANCE, temporary_database_with_column.database_id) -def test_query_data_with_new_column( - cloud_config, temporary_database_with_column, capsys): +def test_query_data_with_new_column(temporary_database_with_column, capsys): snippets.query_data_with_new_column( - cloud_config.spanner_instance, + SPANNER_INSTANCE, temporary_database_with_column.database_id) out, _ = capsys.readouterr() @@ -98,25 +95,23 @@ def test_query_data_with_new_column( @pytest.fixture(scope='module') -def temporary_database_with_indexes( - cloud_config, temporary_database_with_column): +def temporary_database_with_indexes(temporary_database_with_column): snippets.add_index( - cloud_config.spanner_instance, + SPANNER_INSTANCE, 
temporary_database_with_column.database_id) snippets.add_storing_index( - cloud_config.spanner_instance, + SPANNER_INSTANCE, temporary_database_with_column.database_id) yield temporary_database_with_column @pytest.mark.slow -def test_query_data_with_index( - cloud_config, temporary_database_with_indexes, capsys): +def test_query_data_with_index(temporary_database_with_indexes, capsys): @eventually_consistent.call def _(): snippets.query_data_with_index( - cloud_config.spanner_instance, + SPANNER_INSTANCE, temporary_database_with_indexes.database_id) out, _ = capsys.readouterr() @@ -124,12 +119,11 @@ def _(): @pytest.mark.slow -def test_read_data_with_index( - cloud_config, temporary_database_with_indexes, capsys): +def test_read_data_with_index(temporary_database_with_indexes, capsys): @eventually_consistent.call def _(): snippets.read_data_with_index( - cloud_config.spanner_instance, + SPANNER_INSTANCE, temporary_database_with_indexes.database_id) out, _ = capsys.readouterr() @@ -137,12 +131,11 @@ def _(): @pytest.mark.slow -def test_read_data_with_storing_index( - cloud_config, temporary_database_with_indexes, capsys): +def test_read_data_with_storing_index(temporary_database_with_indexes, capsys): @eventually_consistent.call def _(): snippets.read_data_with_storing_index( - cloud_config.spanner_instance, + SPANNER_INSTANCE, temporary_database_with_indexes.database_id) out, _ = capsys.readouterr() @@ -150,15 +143,14 @@ def _(): @pytest.mark.slow -def test_read_write_transaction( - cloud_config, temporary_database_with_column, capsys): +def test_read_write_transaction(temporary_database_with_column, capsys): @eventually_consistent.call def _(): snippets.update_data( - cloud_config.spanner_instance, + SPANNER_INSTANCE, temporary_database_with_column.database_id) snippets.read_write_transaction( - cloud_config.spanner_instance, + SPANNER_INSTANCE, temporary_database_with_column.database_id) out, _ = capsys.readouterr() @@ -167,12 +159,11 @@ def _(): @pytest.mark.slow -def test_read_only_transaction( - cloud_config, temporary_database, capsys): +def test_read_only_transaction(temporary_database, capsys): @eventually_consistent.call def _(): snippets.read_only_transaction( - cloud_config.spanner_instance, + SPANNER_INSTANCE, temporary_database.database_id) out, _ = capsys.readouterr() diff --git a/speech/grpc/transcribe_async_test.py b/speech/grpc/transcribe_async_test.py index c57be6202175..8f99bc47f1af 100644 --- a/speech/grpc/transcribe_async_test.py +++ b/speech/grpc/transcribe_async_test.py @@ -11,16 +11,18 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re from transcribe_async import main +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] -def test_main(resource, capsys, cloud_config): + +def test_main(resource, capsys): # Run the transcribe sample on audio.raw, verify correct results - storage_uri = 'gs://{}/speech/audio.raw'.format( - cloud_config.storage_bucket) + storage_uri = 'gs://{}/speech/audio.raw'.format(BUCKET) main(storage_uri, 'LINEAR16', 16000) out, err = capsys.readouterr() assert re.search(r'how old is the Brooklyn Bridge', out, re.DOTALL | re.I) diff --git a/speech/grpc/transcribe_test.py b/speech/grpc/transcribe_test.py index 23b97c27100a..d7e815ebf182 100644 --- a/speech/grpc/transcribe_test.py +++ b/speech/grpc/transcribe_test.py @@ -11,16 +11,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os import re from transcribe import main +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] -def test_main(resource, capsys, cloud_config): + +def test_main(resource, capsys): # Run the transcribe sample on audio.raw, verify correct results - storage_uri = 'gs://{}/speech/audio.raw'.format( - cloud_config.storage_bucket) + storage_uri = 'gs://{}/speech/audio.raw'.format(BUCKET) main(storage_uri, 'LINEAR16', 16000) out, err = capsys.readouterr() assert re.search(r'how old is the Brooklyn Bridge', out, re.DOTALL | re.I) diff --git a/storage/api/compose_objects_test.py b/storage/api/compose_objects_test.py index 131adc6279de..3b5b05df372b 100644 --- a/storage/api/compose_objects_test.py +++ b/storage/api/compose_objects_test.py @@ -11,12 +11,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from compose_objects import main +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] + -def test_main(cloud_config, resource): +def test_main(resource): main( - cloud_config.storage_bucket, + BUCKET, 'dest.txt', [resource('file1.txt'), resource('file2.txt')] diff --git a/storage/api/crud_object_test.py b/storage/api/crud_object_test.py index 89e764b4d587..d2798a436499 100644 --- a/storage/api/crud_object_test.py +++ b/storage/api/crud_object_test.py @@ -11,13 +11,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re from crud_object import main +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] -def test_main(cloud_config, capsys): - main(cloud_config.storage_bucket, __file__) + +def test_main(capsys): + main(BUCKET, __file__) out, err = capsys.readouterr() assert not re.search(r'Downloaded file [!]=', out) diff --git a/storage/api/customer_supplied_keys_test.py b/storage/api/customer_supplied_keys_test.py index fe0a6db038a5..00bb48245cc0 100644 --- a/storage/api/customer_supplied_keys_test.py +++ b/storage/api/customer_supplied_keys_test.py @@ -11,16 +11,19 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re from gcp.testing.flaky import flaky from customer_supplied_keys import main +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] + @flaky -def test_main(cloud_config, capsys): - main(cloud_config.storage_bucket, __file__) +def test_main(capsys): + main(BUCKET, __file__) out, err = capsys.readouterr() assert not re.search(r'Downloaded file [!]=', out) diff --git a/storage/api/list_objects_test.py b/storage/api/list_objects_test.py index 9910bba57e8a..374dcb8f53a1 100644 --- a/storage/api/list_objects_test.py +++ b/storage/api/list_objects_test.py @@ -11,8 +11,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from list_objects import main +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] + -def test_main(cloud_config): - main(cloud_config.storage_bucket) +def test_main(): + main(BUCKET) diff --git a/storage/cloud-client/acl_test.py b/storage/cloud-client/acl_test.py index 3197b4ea0f52..0c9fd1a50088 100644 --- a/storage/cloud-client/acl_test.py +++ b/storage/cloud-client/acl_test.py @@ -12,21 +12,24 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from google.cloud import storage import google.cloud.storage.acl import pytest import acl +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] # Typically we'd use a @example.com address, but GCS requires a real Google # account. 
TEST_EMAIL = 'jonwayne@google.com' @pytest.fixture -def test_bucket(cloud_config): +def test_bucket(): """Yields a bucket that resets its acl after the test completes.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) + bucket = storage.Client().bucket(BUCKET) acl = google.cloud.storage.acl.BucketACL(bucket) object_default_acl = google.cloud.storage.acl.DefaultObjectACL(bucket) acl.reload() @@ -37,9 +40,9 @@ def test_bucket(cloud_config): @pytest.fixture -def test_blob(cloud_config): +def test_blob(): """Yields a blob that resets its acl after the test completes.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) + bucket = storage.Client().bucket(BUCKET) blob = bucket.blob('storage_acl_test_sigil') blob.upload_from_string('Hello, is it me you\'re looking for?') acl = google.cloud.storage.acl.ObjectACL(blob) @@ -48,88 +51,88 @@ def test_blob(cloud_config): acl.save() -def test_print_bucket_acl(cloud_config, capsys): - acl.print_bucket_acl(cloud_config.storage_bucket) +def test_print_bucket_acl(capsys): + acl.print_bucket_acl(BUCKET) out, _ = capsys.readouterr() assert out -def test_print_bucket_acl_for_user(test_bucket, cloud_config, capsys): +def test_print_bucket_acl_for_user(test_bucket, capsys): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.print_bucket_acl_for_user(cloud_config.storage_bucket, TEST_EMAIL) + acl.print_bucket_acl_for_user(BUCKET, TEST_EMAIL) out, _ = capsys.readouterr() assert 'OWNER' in out -def test_add_bucket_owner(test_bucket, cloud_config): - acl.add_bucket_owner(cloud_config.storage_bucket, TEST_EMAIL) +def test_add_bucket_owner(test_bucket): + acl.add_bucket_owner(BUCKET, TEST_EMAIL) test_bucket.acl.reload() assert 'OWNER' in test_bucket.acl.user(TEST_EMAIL).get_roles() -def test_remove_bucket_owner(test_bucket, cloud_config): +def test_remove_bucket_owner(test_bucket): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.remove_bucket_owner(cloud_config.storage_bucket, TEST_EMAIL) + acl.remove_bucket_owner(BUCKET, TEST_EMAIL) test_bucket.acl.reload() assert 'OWNER' not in test_bucket.acl.user(TEST_EMAIL).get_roles() -def test_add_bucket_default_owner(test_bucket, cloud_config): - acl.add_bucket_default_owner(cloud_config.storage_bucket, TEST_EMAIL) +def test_add_bucket_default_owner(test_bucket): + acl.add_bucket_default_owner(BUCKET, TEST_EMAIL) test_bucket.default_object_acl.reload() roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() assert 'OWNER' in roles -def test_remove_bucket_default_owner(test_bucket, cloud_config): +def test_remove_bucket_default_owner(test_bucket): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.remove_bucket_default_owner(cloud_config.storage_bucket, TEST_EMAIL) + acl.remove_bucket_default_owner(BUCKET, TEST_EMAIL) test_bucket.default_object_acl.reload() roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() assert 'OWNER' not in roles -def test_print_blob_acl(test_blob, cloud_config, capsys): - acl.print_blob_acl(cloud_config.storage_bucket, test_blob.name) +def test_print_blob_acl(test_blob, capsys): + acl.print_blob_acl(BUCKET, test_blob.name) out, _ = capsys.readouterr() assert out -def test_print_blob_acl_for_user(test_blob, cloud_config, capsys): +def test_print_blob_acl_for_user(test_blob, capsys): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() acl.print_blob_acl_for_user( - cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) + BUCKET, test_blob.name, TEST_EMAIL) 
out, _ = capsys.readouterr() assert 'OWNER' in out -def test_add_blob_owner(test_blob, cloud_config): - acl.add_blob_owner(cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) +def test_add_blob_owner(test_blob): + acl.add_blob_owner(BUCKET, test_blob.name, TEST_EMAIL) test_blob.acl.reload() assert 'OWNER' in test_blob.acl.user(TEST_EMAIL).get_roles() -def test_remove_blob_owner(test_blob, cloud_config): +def test_remove_blob_owner(test_blob): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() acl.remove_blob_owner( - cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) + BUCKET, test_blob.name, TEST_EMAIL) test_blob.acl.reload() assert 'OWNER' not in test_blob.acl.user(TEST_EMAIL).get_roles() diff --git a/storage/cloud-client/encryption_test.py b/storage/cloud-client/encryption_test.py index aec91e50ec99..4db6e6cb0f04 100644 --- a/storage/cloud-client/encryption_test.py +++ b/storage/cloud-client/encryption_test.py @@ -13,6 +13,7 @@ # limitations under the License. import base64 +import os import tempfile from google.cloud import storage @@ -21,6 +22,7 @@ import encryption +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] TEST_ENCRYPTION_KEY = 'brtJUWneL92g5q0N2gyDSnlPSYAiIVZ/cWgjyZNeMy0=' TEST_ENCRYPTION_KEY_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY) @@ -37,21 +39,21 @@ def test_generate_encryption_key(capsys): assert len(key) == 32, 'Returned key should be 32 bytes' -def test_upload_encrypted_blob(cloud_config): +def test_upload_encrypted_blob(): with tempfile.NamedTemporaryFile() as source_file: source_file.write(b'test') encryption.upload_encrypted_blob( - cloud_config.storage_bucket, + BUCKET, source_file.name, 'test_encrypted_upload_blob', TEST_ENCRYPTION_KEY) @pytest.fixture -def test_blob(cloud_config): +def test_blob(): """Provides a pre-existing blob in the test bucket.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) + bucket = storage.Client().bucket(BUCKET) blob = Blob('encryption_test_sigil', bucket, encryption_key=TEST_ENCRYPTION_KEY_DECODED) content = 'Hello, is it me you\'re looking for?' @@ -59,11 +61,11 @@ def test_blob(cloud_config): return blob.name, content -def test_download_blob(test_blob, cloud_config): +def test_download_blob(test_blob): test_blob_name, test_blob_content = test_blob with tempfile.NamedTemporaryFile() as dest_file: encryption.download_encrypted_blob( - cloud_config.storage_bucket, + BUCKET, test_blob_name, dest_file.name, TEST_ENCRYPTION_KEY) @@ -72,17 +74,17 @@ def test_download_blob(test_blob, cloud_config): assert downloaded_content == test_blob_content -def test_rotate_encryption_key(test_blob, cloud_config): +def test_rotate_encryption_key(test_blob): test_blob_name, test_blob_content = test_blob encryption.rotate_encryption_key( - cloud_config.storage_bucket, + BUCKET, test_blob_name, TEST_ENCRYPTION_KEY, TEST_ENCRYPTION_KEY_2) with tempfile.NamedTemporaryFile() as dest_file: encryption.download_encrypted_blob( - cloud_config.storage_bucket, + BUCKET, test_blob_name, dest_file.name, TEST_ENCRYPTION_KEY_2) diff --git a/storage/cloud-client/snippets_test.py b/storage/cloud-client/snippets_test.py index a657894be304..a44e8ebf1e93 100644 --- a/storage/cloud-client/snippets_test.py +++ b/storage/cloud-client/snippets_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os import tempfile from google.cloud import storage @@ -21,74 +22,76 @@ import snippets +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] + @pytest.fixture -def test_blob(cloud_config): +def test_blob(): """Provides a pre-existing blob in the test bucket.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) + bucket = storage.Client().bucket(BUCKET) blob = bucket.blob('storage_snippets_test_sigil') blob.upload_from_string('Hello, is it me you\'re looking for?') return blob -def test_list_blobs(test_blob, cloud_config, capsys): - snippets.list_blobs(cloud_config.storage_bucket) +def test_list_blobs(test_blob, capsys): + snippets.list_blobs(BUCKET) out, _ = capsys.readouterr() assert test_blob.name in out -def test_list_blobs_with_prefix(test_blob, cloud_config, capsys): +def test_list_blobs_with_prefix(test_blob, capsys): snippets.list_blobs_with_prefix( - cloud_config.storage_bucket, + BUCKET, prefix='storage_snippets') out, _ = capsys.readouterr() assert test_blob.name in out -def test_upload_blob(cloud_config): +def test_upload_blob(): with tempfile.NamedTemporaryFile() as source_file: source_file.write(b'test') snippets.upload_blob( - cloud_config.storage_bucket, + BUCKET, source_file.name, 'test_upload_blob') -def test_download_blob(test_blob, cloud_config): +def test_download_blob(test_blob): with tempfile.NamedTemporaryFile() as dest_file: snippets.download_blob( - cloud_config.storage_bucket, + BUCKET, test_blob.name, dest_file.name) assert dest_file.read() -def test_blob_metadata(test_blob, cloud_config, capsys): - snippets.blob_metadata(cloud_config.storage_bucket, test_blob.name) +def test_blob_metadata(test_blob, capsys): + snippets.blob_metadata(BUCKET, test_blob.name) out, _ = capsys.readouterr() assert test_blob.name in out -def test_delete_blob(test_blob, cloud_config): +def test_delete_blob(test_blob): snippets.delete_blob( - cloud_config.storage_bucket, + BUCKET, test_blob.name) -def test_make_blob_public(test_blob, cloud_config): +def test_make_blob_public(test_blob): snippets.make_blob_public( - cloud_config.storage_bucket, + BUCKET, test_blob.name) r = requests.get(test_blob.public_url) assert r.text == 'Hello, is it me you\'re looking for?' -def test_generate_signed_url(test_blob, cloud_config, capsys): +def test_generate_signed_url(test_blob, capsys): snippets.generate_signed_url( - cloud_config.storage_bucket, + BUCKET, test_blob.name) out, _ = capsys.readouterr() @@ -98,8 +101,8 @@ def test_generate_signed_url(test_blob, cloud_config, capsys): assert r.text == 'Hello, is it me you\'re looking for?' 
-def test_rename_blob(test_blob, cloud_config): - bucket = storage.Client().bucket(cloud_config.storage_bucket) +def test_rename_blob(test_blob): + bucket = storage.Client().bucket(BUCKET) try: bucket.delete_blob('test_rename_blob') @@ -112,8 +115,8 @@ def test_rename_blob(test_blob, cloud_config): assert bucket.get_blob(test_blob.name) is None -def test_copy_blob(test_blob, cloud_config): - bucket = storage.Client().bucket(cloud_config.storage_bucket) +def test_copy_blob(test_blob): + bucket = storage.Client().bucket(BUCKET) try: bucket.delete_blob('test_copy_blob') diff --git a/translate/cloud-client/snippets_test.py b/translate/cloud-client/snippets_test.py index 434449f7cc70..620fffbda824 100644 --- a/translate/cloud-client/snippets_test.py +++ b/translate/cloud-client/snippets_test.py @@ -18,25 +18,25 @@ import snippets -def test_detect_language(cloud_config, capsys): +def test_detect_language(capsys): snippets.detect_language('Hæ sæta') out, _ = capsys.readouterr() assert 'is' in out -def test_list_languages(cloud_config, capsys): +def test_list_languages(capsys): snippets.list_languages() out, _ = capsys.readouterr() assert 'Icelandic (is)' in out -def test_list_languages_with_target(cloud_config, capsys): +def test_list_languages_with_target(capsys): snippets.list_languages_with_target('is') out, _ = capsys.readouterr() assert u'íslenska (is)' in out -def test_translate_text(cloud_config, capsys): +def test_translate_text(capsys): snippets.translate_text('is', 'Hello world') out, _ = capsys.readouterr() assert u'Halló heimur' in out diff --git a/video/cloud-client/analyze_test.py b/video/cloud-client/analyze_test.py index b95167c00c7c..f3178319b363 100644 --- a/video/cloud-client/analyze_test.py +++ b/video/cloud-client/analyze_test.py @@ -14,35 +14,37 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + import pytest import analyze - +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] LABELS_FILE_PATH = '/video/cat.mp4' FACES_FILE_PATH = '/video/googlework.mp4' SHOTS_FILE_PATH = '/video/gbikes_dinosaur.mp4' @pytest.mark.slow -def test_cat_video_shots(capsys, cloud_config): +def test_cat_video_shots(capsys): analyze.analyze_shots( - 'gs://{}{}'.format(cloud_config.storage_bucket, SHOTS_FILE_PATH)) + 'gs://{}{}'.format(BUCKET, SHOTS_FILE_PATH)) out, _ = capsys.readouterr() assert 'Scene 1:' in out @pytest.mark.slow -def test_work_video_faces(capsys, cloud_config): +def test_work_video_faces(capsys): analyze.analyze_faces( - 'gs://{}{}'.format(cloud_config.storage_bucket, FACES_FILE_PATH)) + 'gs://{}{}'.format(BUCKET, FACES_FILE_PATH)) out, _ = capsys.readouterr() assert 'Thumbnail' in out @pytest.mark.slow -def test_dino_video_labels(capsys, cloud_config): +def test_dino_video_labels(capsys): analyze.analyze_labels( - 'gs://{}{}'.format(cloud_config.storage_bucket, LABELS_FILE_PATH)) + 'gs://{}{}'.format(BUCKET, LABELS_FILE_PATH)) out, _ = capsys.readouterr() assert 'Whiskers' in out diff --git a/vision/cloud-client/crop_hints/crop_hints_test.py b/vision/cloud-client/crop_hints/crop_hints_test.py index eb30eb436fbc..2ba900f48b1b 100644 --- a/vision/cloud-client/crop_hints/crop_hints_test.py +++ b/vision/cloud-client/crop_hints/crop_hints_test.py @@ -17,7 +17,7 @@ import crop_hints -def test_crop(cloud_config, capsys): +def test_crop(capsys): """Checks the output image for cropping the image is created.""" file_name = os.path.join( os.path.dirname(__file__), @@ -27,7 +27,7 @@ def test_crop(cloud_config, capsys): assert os.path.isfile('output-crop.jpg') -def test_draw(cloud_config, capsys): +def test_draw(capsys): """Checks the output image for drawing the crop hint is created.""" file_name = os.path.join( os.path.dirname(__file__), diff --git a/vision/cloud-client/detect/detect_test.py b/vision/cloud-client/detect/detect_test.py index 5e7d2520a2f5..8dd40b8b2642 100644 --- a/vision/cloud-client/detect/detect_test.py +++ b/vision/cloud-client/detect/detect_test.py @@ -16,8 +16,10 @@ import detect +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] -def test_labels(cloud_config, capsys): + +def test_labels(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/wakeupcat.jpg') @@ -26,15 +28,14 @@ def test_labels(cloud_config, capsys): assert 'Labels' in out -def test_labels_uri(cloud_config, capsys): - file_name = ('gs://{}/vision/wakeupcat.jpg'.format( - cloud_config.storage_bucket)) +def test_labels_uri(capsys): + file_name = 'gs://{}/vision/wakeupcat.jpg'.format(BUCKET) detect.detect_labels_uri(file_name) out, _ = capsys.readouterr() assert 'Labels' in out -def test_landmarks(cloud_config, capsys): +def test_landmarks(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/landmark.jpg') @@ -43,15 +44,14 @@ def test_landmarks(cloud_config, capsys): assert 'Palace' in out -def test_landmarks_uri(cloud_config, capsys): - file_name = ('gs://{}/vision/landmark.jpg'.format( - cloud_config.storage_bucket)) +def test_landmarks_uri(capsys): + file_name = 'gs://{}/vision/landmark.jpg'.format(BUCKET) detect.detect_landmarks_uri(file_name) out, _ = capsys.readouterr() assert 'Palace' in out -def test_faces(cloud_config, capsys): +def test_faces(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/face_no_surprise.jpg') @@ -60,15 +60,14 @@ def test_faces(cloud_config, capsys): assert 'Likelihood.POSSIBLE' in out -def test_faces_uri(cloud_config, 
capsys): - file_name = ('gs://{}/vision/face_no_surprise.jpg'.format( - cloud_config.storage_bucket)) +def test_faces_uri(capsys): + file_name = 'gs://{}/vision/face_no_surprise.jpg'.format(BUCKET) detect.detect_faces_uri(file_name) out, _ = capsys.readouterr() assert 'Likelihood.POSSIBLE' in out -def test_logos(cloud_config, capsys): +def test_logos(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/logos.png') @@ -77,15 +76,14 @@ def test_logos(cloud_config, capsys): assert 'Google' in out -def test_logos_uri(cloud_config, capsys): - file_name = ('gs://{}/vision/logos.png'.format( - cloud_config.storage_bucket)) +def test_logos_uri(capsys): + file_name = 'gs://{}/vision/logos.png'.format(BUCKET) detect.detect_logos_uri(file_name) out, _ = capsys.readouterr() assert 'Google' in out -def test_safe_search(cloud_config, capsys): +def test_safe_search(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/wakeupcat.jpg') @@ -94,15 +92,14 @@ def test_safe_search(cloud_config, capsys): assert 'Likelihood.VERY_LIKELY' in out -def test_safe_search_uri(cloud_config, capsys): - file_name = ('gs://{}/vision/wakeupcat.jpg'.format( - cloud_config.storage_bucket)) +def test_safe_search_uri(capsys): + file_name = 'gs://{}/vision/wakeupcat.jpg'.format(BUCKET) detect.detect_safe_search_uri(file_name) out, _ = capsys.readouterr() assert 'Likelihood.VERY_LIKELY' in out -def test_detect_text(cloud_config, capsys): +def test_detect_text(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/text.jpg') @@ -111,15 +108,14 @@ def test_detect_text(cloud_config, capsys): assert '37%' in out -def test_detect_text_uri(cloud_config, capsys): - file_name = ('gs://{}/vision/text.jpg'.format( - cloud_config.storage_bucket)) +def test_detect_text_uri(capsys): + file_name = 'gs://{}/vision/text.jpg'.format(BUCKET) detect.detect_text_uri(file_name) out, _ = capsys.readouterr() assert '37%' in out -def test_detect_properties(cloud_config, capsys): +def test_detect_properties(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/landmark.jpg') @@ -128,16 +124,15 @@ def test_detect_properties(cloud_config, capsys): assert 'frac' in out -def test_detect_properties_uri(cloud_config, capsys): - file_name = ('gs://{}/vision/landmark.jpg'.format( - cloud_config.storage_bucket)) +def test_detect_properties_uri(capsys): + file_name = 'gs://{}/vision/landmark.jpg'.format(BUCKET) detect.detect_properties_uri(file_name) out, _ = capsys.readouterr() assert 'frac' in out # Vision 1.1 tests -def test_detect_web(cloud_config, capsys): +def test_detect_web(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/landmark.jpg') @@ -146,15 +141,14 @@ def test_detect_web(cloud_config, capsys): assert 'Description: Palace of Fine Arts Theatre' in out -def test_detect_web_uri(cloud_config, capsys): - file_name = ('gs://{}/vision/landmark.jpg'.format( - cloud_config.storage_bucket)) +def test_detect_web_uri(capsys): + file_name = 'gs://{}/vision/landmark.jpg'.format(BUCKET) detect.detect_web_uri(file_name) out, _ = capsys.readouterr() assert 'Description: Palace of Fine Arts Theatre' in out -def test_detect_document(cloud_config, capsys): +def test_detect_document(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/text.jpg') @@ -163,15 +157,14 @@ def test_detect_document(cloud_config, capsys): assert '37%' in out -def test_detect_document_uri(cloud_config, capsys): - file_name = ('gs://{}/vision/text.jpg'.format( - 
cloud_config.storage_bucket)) +def test_detect_document_uri(capsys): + file_name = 'gs://{}/vision/text.jpg'.format(BUCKET) detect.detect_document_uri(file_name) out, _ = capsys.readouterr() assert '37%' in out -def test_detect_crop_hints(cloud_config, capsys): +def test_detect_crop_hints(capsys): file_name = os.path.join( os.path.dirname(__file__), 'resources/wakeupcat.jpg') @@ -180,9 +173,8 @@ def test_detect_crop_hints(cloud_config, capsys): assert 'bounds: (0,0)' in out -def test_detect_crop_hints_uri(cloud_config, capsys): - file_name = ('gs://{}/vision/wakeupcat.jpg'.format( - cloud_config.storage_bucket)) +def test_detect_crop_hints_uri(capsys): + file_name = 'gs://{}/vision/wakeupcat.jpg'.format(BUCKET) detect.detect_crop_hints_uri(file_name) out, _ = capsys.readouterr() assert 'bounds: (0,0)' in out diff --git a/vision/cloud-client/document_text/doctext_test.py b/vision/cloud-client/document_text/doctext_test.py index 4ee892a832f0..cb881e319671 100644 --- a/vision/cloud-client/document_text/doctext_test.py +++ b/vision/cloud-client/document_text/doctext_test.py @@ -17,7 +17,7 @@ import doctext -def test_text(cloud_config, capsys): +def test_text(capsys): """Checks the output image for drawing the crop hint is created.""" doctext.render_doc_text('resources/text_menu.jpg', 'output-text.jpg') out, _ = capsys.readouterr() diff --git a/vision/cloud-client/web/web_detect_test.py b/vision/cloud-client/web/web_detect_test.py index d5cd08b68fe4..590a5b676d4a 100644 --- a/vision/cloud-client/web/web_detect_test.py +++ b/vision/cloud-client/web/web_detect_test.py @@ -12,25 +12,29 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + import web_detect +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] + -def test_detect_file(cloud_config, capsys): +def test_detect_file(capsys): file_name = ('../detect/resources/landmark.jpg') web_detect.report(web_detect.annotate(file_name)) out, _ = capsys.readouterr() assert 'Description: Palace of Fine Arts Theatre' in out -def test_detect_web_gsuri(cloud_config, capsys): +def test_detect_web_gsuri(capsys): file_name = ('gs://{}/vision/landmark.jpg'.format( - cloud_config.storage_bucket)) + BUCKET)) web_detect.report(web_detect.annotate(file_name)) out, _ = capsys.readouterr() assert 'Description: Palace of Fine Arts Theatre' in out -def test_detect_web_http(cloud_config, capsys): +def test_detect_web_http(capsys): web_detect.report(web_detect.annotate('https://goo.gl/X4qcB6')) out, _ = capsys.readouterr() assert 'https://cloud.google.com/vision/' in out From 4c457fda3dad1ff6b1fc4d5b5971635c8639e99a Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 4 Apr 2017 15:14:31 -0700 Subject: [PATCH 2/3] Fix client secrets --- bigquery/api/installed_app_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery/api/installed_app_test.py b/bigquery/api/installed_app_test.py index 313f46569769..2401d8931cd8 100644 --- a/bigquery/api/installed_app_test.py +++ b/bigquery/api/installed_app_test.py @@ -19,7 +19,7 @@ import installed_app PROJECT = os.environ['GCLOUD_PROJECT'] -CLIENT_SECRETS = os.environ['CLIENT_SECRETS'] +CLIENT_SECRETS = os.environ['GOOGLE_CLIENT_SECRETS'] class Namespace(object): From 9fbc49ac01ca9fe88e562c2d15c7e46c73390645 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 4 Apr 2017 15:34:11 -0700 Subject: [PATCH 3/3] Fix bigtable instance --- bigtable/hello/main_test.py | 4 ++-- bigtable/hello_happybase/main_test.py | 4 ++-- 2 files changed, 4 
insertions(+), 4 deletions(-) diff --git a/bigtable/hello/main_test.py b/bigtable/hello/main_test.py index 98c4dd48ff3b..4080d7ee2732 100644 --- a/bigtable/hello/main_test.py +++ b/bigtable/hello/main_test.py @@ -18,7 +18,7 @@ from main import main PROJECT = os.environ['GCLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] +BIGTABLE_CLUSTER = os.environ['BIGTABLE_CLUSTER'] TABLE_NAME_FORMAT = 'hello-bigtable-system-tests-{}' TABLE_NAME_RANGE = 10000 @@ -27,7 +27,7 @@ def test_main(capsys): table_name = TABLE_NAME_FORMAT.format( random.randrange(TABLE_NAME_RANGE)) - main(PROJECT, BIGTABLE_INSTANCE, table_name) + main(PROJECT, BIGTABLE_CLUSTER, table_name) out, _ = capsys.readouterr() assert 'Creating the {} table.'.format(table_name) in out diff --git a/bigtable/hello_happybase/main_test.py b/bigtable/hello_happybase/main_test.py index 2d4f4bdbf798..3fc4ad134742 100644 --- a/bigtable/hello_happybase/main_test.py +++ b/bigtable/hello_happybase/main_test.py @@ -18,7 +18,7 @@ from main import main PROJECT = os.environ['GCLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] +BIGTABLE_CLUSTER = os.environ['BIGTABLE_CLUSTER'] TABLE_NAME_FORMAT = 'hello_happybase-system-tests-{}' TABLE_NAME_RANGE = 10000 @@ -28,7 +28,7 @@ def test_main(capsys): random.randrange(TABLE_NAME_RANGE)) main( PROJECT, - BIGTABLE_INSTANCE, + BIGTABLE_CLUSTER, table_name) out, _ = capsys.readouterr()
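
Note on running the updated tests: with the shared cloud_config fixture removed, each sample test now reads its configuration directly from environment variables at import time. The sketch below is a minimal, illustrative helper, not code from this patch series, that fails fast when those variables are unset. The variable names are taken from the diffs above, except SPANNER_INSTANCE, whose definition is not shown in this excerpt and is assumed to follow the same os.environ pattern.

import os

# Environment variables the updated tests read via os.environ[...].
# GCLOUD_PROJECT, CLOUD_STORAGE_BUCKET, GOOGLE_CLIENT_SECRETS and
# BIGTABLE_CLUSTER appear in the diffs above; SPANNER_INSTANCE is an
# assumed name for the Spanner instance used by the Spanner snippets tests.
REQUIRED_ENV_VARS = (
    'GCLOUD_PROJECT',
    'CLOUD_STORAGE_BUCKET',
    'GOOGLE_CLIENT_SECRETS',
    'BIGTABLE_CLUSTER',
    'SPANNER_INSTANCE',
)


def check_test_environment():
    """Raise a descriptive error if any required variable is missing."""
    missing = [name for name in REQUIRED_ENV_VARS if not os.environ.get(name)]
    if missing:
        raise RuntimeError(
            'Set these environment variables before running the tests: '
            + ', '.join(missing))


if __name__ == '__main__':
    check_test_environment()
    print('Test environment looks complete.')

Since the tests index os.environ directly (for example, PROJECT = os.environ['GCLOUD_PROJECT']), a missing variable otherwise surfaces as a KeyError during collection; a check like this makes the failure mode explicit before pytest starts.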