diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000000..47eb5c354d
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,11 @@
+# Code owners file.
+# This file controls who is tagged for review for any given pull request.
+#
+# For syntax help see:
+# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+
+
+# The api-spanner-python team is the default owner for anything not
+# explicitly taken by someone else.
+* @googleapis/api-spanner-python
+/samples/ @googleapis/api-spanner-python @googleapis/python-samples-owners
\ No newline at end of file
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index 228529efe2..6316a537f7 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version.
- Previously released library versions will continue to be available. For more information please
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+ Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
{% block body %} {% endblock %}
diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md
new file mode 100644
index 0000000000..55c97b32f4
--- /dev/null
+++ b/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md
new file mode 100644
index 0000000000..34c882b6f1
--- /dev/null
+++ b/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
\ No newline at end of file
diff --git a/samples/samples/README.rst b/samples/samples/README.rst
new file mode 100644
index 0000000000..b0573c249b
--- /dev/null
+++ b/samples/samples/README.rst
@@ -0,0 +1,290 @@
+
+.. This file is automatically generated. Do not edit this file directly.
+
+Google Cloud Spanner Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=spanner/cloud-client/README.rst
+
+
+This directory contains samples for Google Cloud Spanner. `Google Cloud Spanner`_ is a highly scalable, transactional, managed, NewSQL database service. Cloud Spanner solves the need for a horizontally-scaling database with consistent global transactions and SQL semantics.
+
+
+
+
+.. _Google Cloud Spanner: https://cloud.google.com/spanner/docs
+
+
+Setup
+-------------------------------------------------------------------------------
+
+
+
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication set up. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
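+
+Once credentials are configured (for example by running ``gcloud auth
+application-default login``, or by pointing the ``GOOGLE_APPLICATION_CREDENTIALS``
+environment variable at a service account key file), the client library picks
+them up automatically. A minimal sketch:
+
+.. code-block:: python
+
+    from google.cloud import spanner
+
+    # No explicit key handling is needed here; the client discovers
+    # Application Default Credentials on its own.
+    spanner_client = spanner.Client()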
+
+
+
+
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
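+
+        # Then change into the sample directory. These samples live under
+        # spanner/cloud-client in that repository:
+        $ cd python-docs-samples/spanner/cloud-client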
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.6+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
+
+
+
+
+
+
+Samples
+-------------------------------------------------------------------------------
+
+
+Snippets
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=spanner/cloud-client/snippets.py,spanner/cloud-client/README.rst
+
+
+
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python snippets.py
+
+
+ usage: snippets.py [-h] [--database-id DATABASE_ID]
+ instance_id
+ {create_instance,create_database,insert_data,delete_data,query_data,read_data,read_stale_data,add_column,update_data,query_data_with_new_column,read_write_transaction,read_only_transaction,add_index,query_data_with_index,read_data_with_index,add_storing_index,read_data_with_storing_index,create_table_with_timestamp,insert_data_with_timestamp,add_timestamp_column,update_data_with_timestamp,query_data_with_timestamp,write_struct_data,query_with_struct,query_with_array_of_struct,query_struct_field,query_nested_struct_field,insert_data_with_dml,update_data_with_dml,delete_data_with_dml,update_data_with_dml_timestamp,dml_write_read_transaction,update_data_with_dml_struct,insert_with_dml,query_data_with_parameter,write_with_dml_transaction,update_data_with_partitioned_dml,delete_data_with_partitioned_dml,update_with_batch_dml,create_table_with_datatypes,insert_datatypes_data,query_data_with_array,query_data_with_bool,query_data_with_bytes,query_data_with_date,query_data_with_float,query_data_with_int,query_data_with_string,query_data_with_timestamp_parameter,query_data_with_query_options,create_client_with_query_options}
+ ...
+
+ This application demonstrates how to do basic operations using Cloud
+ Spanner.
+
+ For more information, see the README.rst under /spanner.
+
+ positional arguments:
+ instance_id Your Cloud Spanner instance ID.
+ {create_instance,create_database,insert_data,delete_data,query_data,read_data,read_stale_data,add_column,update_data,query_data_with_new_column,read_write_transaction,read_only_transaction,add_index,query_data_with_index,read_data_with_index,add_storing_index,read_data_with_storing_index,create_table_with_timestamp,insert_data_with_timestamp,add_timestamp_column,update_data_with_timestamp,query_data_with_timestamp,write_struct_data,query_with_struct,query_with_array_of_struct,query_struct_field,query_nested_struct_field,insert_data_with_dml,update_data_with_dml,delete_data_with_dml,update_data_with_dml_timestamp,dml_write_read_transaction,update_data_with_dml_struct,insert_with_dml,query_data_with_parameter,write_with_dml_transaction,update_data_with_partitioned_dml,delete_data_with_partitioned_dml,update_with_batch_dml,create_table_with_datatypes,insert_datatypes_data,query_data_with_array,query_data_with_bool,query_data_with_bytes,query_data_with_date,query_data_with_float,query_data_with_int,query_data_with_string,query_data_with_timestamp_parameter,query_data_with_query_options,create_client_with_query_options}
+ create_instance Creates an instance.
+ create_database Creates a database and tables for sample data.
+ insert_data Inserts sample data into the given database. The
+ database and table must already exist and can be
+ created using `create_database`.
+ delete_data Deletes sample data from the given database. The
+ database, table, and data must already exist and can
+ be created using `create_database` and `insert_data`.
+ query_data Queries sample data from the database using SQL.
+ read_data Reads sample data from the database.
+ read_stale_data Reads sample data from the database. The data is
+ exactly 15 seconds stale.
+ add_column Adds a new column to the Albums table in the example
+ database.
+ update_data Updates sample data in the database. This updates the
+ `MarketingBudget` column which must be created before
+ running this sample. You can add the column by running
+ the `add_column` sample or by running this DDL
+ statement against your database: ALTER TABLE Albums
+ ADD COLUMN MarketingBudget INT64
+ query_data_with_new_column
+ Queries sample data from the database using SQL. This
+ sample uses the `MarketingBudget` column. You can add
+ the column by running the `add_column` sample or by
+ running this DDL statement against your database:
+ ALTER TABLE Albums ADD COLUMN MarketingBudget INT64
+ read_write_transaction
+ Performs a read-write transaction to update two sample
+ records in the database. This will transfer 200,000
+ from the `MarketingBudget` field for the second Album
+ to the first Album. If the `MarketingBudget` is too
+ low, it will raise an exception. Before running this
+ sample, you will need to run the `update_data` sample
+ to populate the fields.
+ read_only_transaction
+ Reads data inside of a read-only transaction. Within
+ the read-only transaction, or "snapshot", the
+                        application sees a consistent view of the database at a
+ particular timestamp.
+ add_index Adds a simple index to the example database.
+ query_data_with_index
+ Queries sample data from the database using SQL and an
+ index. The index must exist before running this
+ sample. You can add the index by running the
+ `add_index` sample or by running this DDL statement
+ against your database: CREATE INDEX AlbumsByAlbumTitle
+ ON Albums(AlbumTitle) This sample also uses the
+ `MarketingBudget` column. You can add the column by
+ running the `add_column` sample or by running this DDL
+ statement against your database: ALTER TABLE Albums
+ ADD COLUMN MarketingBudget INT64
+ read_data_with_index
+                        Reads sample data from the database using an index.
+                        The index must exist before running this sample and
+                        can be created using `add_index`.
+    add_storing_index  Adds a storing index to the example database.
+ read_data_with_storing_index
+                        Reads sample data from the database using an index
+                        with a storing clause. The index must exist before
+                        running this sample and can be created using
+                        `add_storing_index`.
+ create_table_with_timestamp
+ Creates a table with a COMMIT_TIMESTAMP column.
+ insert_data_with_timestamp
+ Inserts data with a COMMIT_TIMESTAMP field into a
+ table.
+ add_timestamp_column
+ Adds a new TIMESTAMP column to the Albums table in the
+ example database.
+ update_data_with_timestamp
+ Updates Performances tables in the database with the
+ COMMIT_TIMESTAMP column. This updates the
+ `MarketingBudget` column which must be created before
+ running this sample. You can add the column by running
+ the `add_column` sample or by running this DDL
+ statement against your database: ALTER TABLE Albums
+ ADD COLUMN MarketingBudget INT64 In addition this
+ update expects the LastUpdateTime column added by
+ applying this DDL statement against your database:
+ ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP
+ OPTIONS(allow_commit_timestamp=true)
+ query_data_with_timestamp
+ Queries sample data from the database using SQL. This
+                        sample uses the `LastUpdateTime` column which must be
+ created before running this sample. You can add the
+ column by running the `add_timestamp_column` sample or
+ by running this DDL statement against your database:
+ ALTER TABLE Performances ADD COLUMN LastUpdateTime
+ TIMESTAMP OPTIONS (allow_commit_timestamp=true)
+ write_struct_data Inserts sample data that can be used to test STRUCT
+ parameters in queries.
+ query_with_struct Query a table using STRUCT parameters.
+ query_with_array_of_struct
+ Query a table using an array of STRUCT parameters.
+ query_struct_field Query a table using field access on a STRUCT
+ parameter.
+ query_nested_struct_field
+ Query a table using nested field access on a STRUCT
+ parameter.
+ insert_data_with_dml
+ Inserts sample data into the given database using a
+ DML statement.
+ update_data_with_dml
+ Updates sample data from the database using a DML
+ statement.
+ delete_data_with_dml
+ Deletes sample data from the database using a DML
+ statement.
+ update_data_with_dml_timestamp
+ Updates data with Timestamp from the database using a
+ DML statement.
+ dml_write_read_transaction
+ First inserts data then reads it from within a
+ transaction using DML.
+ update_data_with_dml_struct
+ Updates data with a DML statement and STRUCT
+ parameters.
+ insert_with_dml Inserts data with a DML statement into the database.
+ query_data_with_parameter
+ Queries sample data from the database using SQL with a
+ parameter.
+ write_with_dml_transaction
+ Transfers part of a marketing budget from one album to
+ another.
+ update_data_with_partitioned_dml
+ Update sample data with a partitioned DML statement.
+ delete_data_with_partitioned_dml
+ Delete sample data with a partitioned DML statement.
+ update_with_batch_dml
+ Updates sample data in the database using Batch DML.
+ create_table_with_datatypes
+                        Creates a table with supported datatypes.
+ insert_datatypes_data
+ Inserts data with supported datatypes into a table.
+ query_data_with_array
+ Queries sample data using SQL with an ARRAY parameter.
+ query_data_with_bool
+ Queries sample data using SQL with a BOOL parameter.
+ query_data_with_bytes
+ Queries sample data using SQL with a BYTES parameter.
+ query_data_with_date
+ Queries sample data using SQL with a DATE parameter.
+ query_data_with_float
+ Queries sample data using SQL with a FLOAT64
+ parameter.
+ query_data_with_int
+                        Queries sample data using SQL with an INT64 parameter.
+ query_data_with_string
+ Queries sample data using SQL with a STRING parameter.
+ query_data_with_timestamp_parameter
+ Queries sample data using SQL with a TIMESTAMP
+ parameter.
+ query_data_with_query_options
+ Queries sample data using SQL with query options.
+ create_client_with_query_options
+ Create a client with query options.
+
+ optional arguments:
+ -h, --help show this help message and exit
+ --database-id DATABASE_ID
+ Your Cloud Spanner database ID.
+
+
+
+
+
+
+
+
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
diff --git a/samples/samples/README.rst.in b/samples/samples/README.rst.in
new file mode 100644
index 0000000000..542becb9a7
--- /dev/null
+++ b/samples/samples/README.rst.in
@@ -0,0 +1,24 @@
+# This file is used to generate README.rst
+
+product:
+ name: Google Cloud Spanner
+ short_name: Cloud Spanner
+ url: https://cloud.google.com/spanner/docs
+ description: >
+ `Google Cloud Spanner`_ is a highly scalable, transactional, managed,
+ NewSQL database service. Cloud Spanner solves the need for a
+ horizontally-scaling database with consistent global transactions and
+ SQL semantics.
+
+setup:
+- auth
+- install_deps
+
+samples:
+- name: Snippets
+ file: snippets.py
+ show_help: true
+
+cloud_client_library: true
+
+folder: spanner/cloud-client
\ No newline at end of file
diff --git a/samples/samples/backup_sample.py b/samples/samples/backup_sample.py
new file mode 100644
index 0000000000..19b758d560
--- /dev/null
+++ b/samples/samples/backup_sample.py
@@ -0,0 +1,314 @@
+# Copyright 2020 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This application demonstrates how to create and restore from backups
+using Cloud Spanner.
+
+For more information, see the README.rst under /spanner.
+"""
+
+import argparse
+from datetime import datetime, timedelta
+import time
+
+from google.cloud import spanner
+
+
+# [START spanner_create_backup]
+def create_backup(instance_id, database_id, backup_id):
+ """Creates a backup for a database."""
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ # Create a backup
+ expire_time = datetime.utcnow() + timedelta(days=14)
+ backup = instance.backup(backup_id, database=database, expire_time=expire_time)
+ operation = backup.create()
+
+ # Wait for backup operation to complete.
+ operation.result(1200)
+
+ # Verify that the backup is ready.
+ backup.reload()
+ assert backup.is_ready() is True
+
+ # Get the name, create time and backup size.
+ backup.reload()
+ print(
+ "Backup {} of size {} bytes was created at {}".format(
+ backup.name, backup.size_bytes, backup.create_time
+ )
+ )
+
+
+# [END spanner_create_backup]
+
+
+# [START spanner_restore_database]
+def restore_database(instance_id, new_database_id, backup_id):
+ """Restores a database from a backup."""
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+ # Start restoring backup to a new database.
+ backup = instance.backup(backup_id)
+ new_database = instance.database(new_database_id)
+ operation = new_database.restore(backup)
+
+ # Wait for restore operation to complete.
+ operation.result(1200)
+
+ # Newly created database has restore information.
+ new_database.reload()
+ restore_info = new_database.restore_info
+ print(
+ "Database {} restored to {} from backup {}.".format(
+ restore_info.backup_info.source_database,
+ new_database_id,
+ restore_info.backup_info.backup,
+ )
+ )
+
+
+# [END spanner_restore_database]
+
+
+# [START spanner_cancel_backup]
+def cancel_backup(instance_id, database_id, backup_id):
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ expire_time = datetime.utcnow() + timedelta(days=30)
+
+ # Create a backup.
+ backup = instance.backup(backup_id, database=database, expire_time=expire_time)
+ operation = backup.create()
+
+ # Cancel backup creation.
+ operation.cancel()
+
+    # Cancel operations are best effort, so the backup creation will either
+    # run to completion or be cancelled.
+ while not operation.done():
+ time.sleep(300) # 5 mins
+
+    # Clean up the backup resource if the create operation still succeeded.
+ if backup.exists():
+ print("Backup was created before the cancel completed.")
+ backup.delete()
+ print("Backup deleted.")
+ else:
+ print("Backup creation was successfully cancelled.")
+
+
+# [END spanner_cancel_backup]
+
+
+# [START spanner_list_backup_operations]
+def list_backup_operations(instance_id, database_id):
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+ # List the CreateBackup operations.
+ filter_ = (
+ "(metadata.database:{}) AND "
+ "(metadata.@type:type.googleapis.com/"
+ "google.spanner.admin.database.v1.CreateBackupMetadata)"
+ ).format(database_id)
+ operations = instance.list_backup_operations(filter_=filter_)
+ for op in operations:
+ metadata = op.metadata
+ print(
+ "Backup {} on database {}: {}% complete.".format(
+ metadata.name, metadata.database, metadata.progress.progress_percent
+ )
+ )
+
+
+# [END spanner_list_backup_operations]
+
+
+# [START spanner_list_database_operations]
+def list_database_operations(instance_id):
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+    # List the progress of databases being optimized after a restore.
+ filter_ = (
+ "(metadata.@type:type.googleapis.com/"
+ "google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata)"
+ )
+ operations = instance.list_database_operations(filter_=filter_)
+ for op in operations:
+ print(
+ "Database {} restored from backup is {}% optimized.".format(
+ op.metadata.name, op.metadata.progress.progress_percent
+ )
+ )
+
+
+# [END spanner_list_database_operations]
+
+
+# [START spanner_list_backups]
+def list_backups(instance_id, database_id, backup_id):
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+ # List all backups.
+ print("All backups:")
+ for backup in instance.list_backups():
+ print(backup.name)
+
+    # List all backups whose name contains the given backup ID.
+ print('All backups with backup name containing "{}":'.format(backup_id))
+ for backup in instance.list_backups(filter_="name:{}".format(backup_id)):
+ print(backup.name)
+
+    # List all backups whose source database name contains the given ID.
+ print('All backups with database name containing "{}":'.format(database_id))
+ for backup in instance.list_backups(filter_="database:{}".format(database_id)):
+ print(backup.name)
+
+ # List all backups that expire before a timestamp.
+ expire_time = datetime.utcnow().replace(microsecond=0) + timedelta(days=30)
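+    # `timetuple()` unpacks to (year, month, day, hour, minute, second, ...);
+    # str.format ignores the extra trailing fields, yielding a timestamp
+    # string such as 2020-1-2T3:4:5Z.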
+ print(
+ 'All backups with expire_time before "{}-{}-{}T{}:{}:{}Z":'.format(
+ *expire_time.timetuple()
+ )
+ )
+ for backup in instance.list_backups(
+ filter_='expire_time < "{}-{}-{}T{}:{}:{}Z"'.format(*expire_time.timetuple())
+ ):
+ print(backup.name)
+
+    # List all backups larger than 100 bytes.
+ print("All backups with backup size more than 100 bytes:")
+ for backup in instance.list_backups(filter_="size_bytes > 100"):
+ print(backup.name)
+
+    # List backups that were created after a timestamp and are also READY.
+ create_time = datetime.utcnow().replace(microsecond=0) - timedelta(days=1)
+ print(
+ 'All backups created after "{}-{}-{}T{}:{}:{}Z" and are READY:'.format(
+ *create_time.timetuple()
+ )
+ )
+ for backup in instance.list_backups(
+ filter_='create_time >= "{}-{}-{}T{}:{}:{}Z" AND state:READY'.format(
+ *create_time.timetuple()
+ )
+ ):
+ print(backup.name)
+
+ print("All backups with pagination")
+ for page in instance.list_backups(page_size=2).pages:
+ for backup in page:
+ print(backup.name)
+
+
+# [END spanner_list_backups]
+
+
+# [START spanner_delete_backup]
+def delete_backup(instance_id, backup_id):
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ backup = instance.backup(backup_id)
+ backup.reload()
+
+ # Wait for databases that reference this backup to finish optimizing.
+ while backup.referencing_databases:
+ time.sleep(30)
+ backup.reload()
+
+ # Delete the backup.
+ backup.delete()
+
+ # Verify that the backup is deleted.
+ assert backup.exists() is False
+ print("Backup {} has been deleted.".format(backup.name))
+
+
+# [END spanner_delete_backup]
+
+
+# [START spanner_update_backup]
+def update_backup(instance_id, backup_id):
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ backup = instance.backup(backup_id)
+ backup.reload()
+
+ # Expire time must be within 366 days of the create time of the backup.
+ old_expire_time = backup.expire_time
+ new_expire_time = old_expire_time + timedelta(days=30)
+ backup.update_expire_time(new_expire_time)
+ print(
+ "Backup {} expire time was updated from {} to {}.".format(
+ backup.name, old_expire_time, new_expire_time
+ )
+ )
+
+
+# [END spanner_update_backup]
+
+
+if __name__ == "__main__": # noqa: C901
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+ parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.")
+ parser.add_argument(
+ "--database-id", help="Your Cloud Spanner database ID.", default="example_db"
+ )
+ parser.add_argument(
+ "--backup-id", help="Your Cloud Spanner backup ID.", default="example_backup"
+ )
+
+ subparsers = parser.add_subparsers(dest="command")
+ subparsers.add_parser("create_backup", help=create_backup.__doc__)
+ subparsers.add_parser("cancel_backup", help=cancel_backup.__doc__)
+ subparsers.add_parser("update_backup", help=update_backup.__doc__)
+ subparsers.add_parser("restore_database", help=restore_database.__doc__)
+ subparsers.add_parser("list_backups", help=list_backups.__doc__)
+ subparsers.add_parser("list_backup_operations", help=list_backup_operations.__doc__)
+ subparsers.add_parser(
+ "list_database_operations", help=list_database_operations.__doc__
+ )
+ subparsers.add_parser("delete_backup", help=delete_backup.__doc__)
+
+ args = parser.parse_args()
+
+ if args.command == "create_backup":
+ create_backup(args.instance_id, args.database_id, args.backup_id)
+ elif args.command == "cancel_backup":
+ cancel_backup(args.instance_id, args.database_id, args.backup_id)
+ elif args.command == "update_backup":
+ update_backup(args.instance_id, args.backup_id)
+ elif args.command == "restore_database":
+ restore_database(args.instance_id, args.database_id, args.backup_id)
+ elif args.command == "list_backups":
+ list_backups(args.instance_id, args.database_id, args.backup_id)
+ elif args.command == "list_backup_operations":
+ list_backup_operations(args.instance_id, args.database_id)
+ elif args.command == "list_database_operations":
+ list_database_operations(args.instance_id)
+ elif args.command == "delete_backup":
+ delete_backup(args.instance_id, args.backup_id)
+ else:
+ print("Command {} did not match expected commands.".format(args.command))
diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py
new file mode 100644
index 0000000000..5a87c39d9d
--- /dev/null
+++ b/samples/samples/backup_sample_test.py
@@ -0,0 +1,112 @@
+# Copyright 2020 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import uuid
+
+from google.cloud import spanner
+import pytest
+
+import backup_sample
+
+
+def unique_instance_id():
+ """ Creates a unique id for the database. """
+ return f"test-instance-{uuid.uuid4().hex[:10]}"
+
+
+def unique_database_id():
+ """ Creates a unique id for the database. """
+ return f"test-db-{uuid.uuid4().hex[:10]}"
+
+
+def unique_backup_id():
+ """ Creates a unique id for the backup. """
+ return f"test-backup-{uuid.uuid4().hex[:10]}"
+
+
+INSTANCE_ID = unique_instance_id()
+DATABASE_ID = unique_database_id()
+RESTORE_DB_ID = unique_database_id()
+BACKUP_ID = unique_backup_id()
+
+
+@pytest.fixture(scope="module")
+def spanner_instance():
+ spanner_client = spanner.Client()
+ instance_config = "{}/instanceConfigs/{}".format(
+ spanner_client.project_name, "regional-us-central1"
+ )
+ instance = spanner_client.instance(INSTANCE_ID, instance_config)
+ op = instance.create()
+ op.result(120) # block until completion
+ yield instance
+ instance.delete()
+
+
+@pytest.fixture(scope="module")
+def database(spanner_instance):
+ """ Creates a temporary database that is removed after testing. """
+ db = spanner_instance.database(DATABASE_ID)
+ db.create()
+ yield db
+ db.drop()
+
+
+def test_create_backup(capsys, database):
+ backup_sample.create_backup(INSTANCE_ID, DATABASE_ID, BACKUP_ID)
+ out, _ = capsys.readouterr()
+ assert BACKUP_ID in out
+
+
+def test_restore_database(capsys):
+ backup_sample.restore_database(INSTANCE_ID, RESTORE_DB_ID, BACKUP_ID)
+ out, _ = capsys.readouterr()
+ assert (DATABASE_ID + " restored to ") in out
+ assert (RESTORE_DB_ID + " from backup ") in out
+ assert BACKUP_ID in out
+
+
+def test_list_backup_operations(capsys, spanner_instance):
+ backup_sample.list_backup_operations(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert BACKUP_ID in out
+ assert DATABASE_ID in out
+
+
+def test_list_backups(capsys, spanner_instance):
+ backup_sample.list_backups(INSTANCE_ID, DATABASE_ID, BACKUP_ID)
+ out, _ = capsys.readouterr()
+ id_count = out.count(BACKUP_ID)
+ assert id_count == 7
+
+
+def test_update_backup(capsys):
+ backup_sample.update_backup(INSTANCE_ID, BACKUP_ID)
+ out, _ = capsys.readouterr()
+ assert BACKUP_ID in out
+
+
+def test_delete_backup(capsys, spanner_instance):
+ backup_sample.delete_backup(INSTANCE_ID, BACKUP_ID)
+ out, _ = capsys.readouterr()
+ assert BACKUP_ID in out
+
+
+def test_cancel_backup(capsys):
+ backup_sample.cancel_backup(INSTANCE_ID, DATABASE_ID, BACKUP_ID)
+ out, _ = capsys.readouterr()
+ cancel_success = "Backup creation was successfully cancelled." in out
+ cancel_failure = ("Backup was created before the cancel completed." in out) and (
+ "Backup deleted." in out
+ )
+ assert cancel_success or cancel_failure
diff --git a/samples/samples/batch_sample.py b/samples/samples/batch_sample.py
new file mode 100644
index 0000000000..553dc31517
--- /dev/null
+++ b/samples/samples/batch_sample.py
@@ -0,0 +1,90 @@
+# Copyright 2018 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This application demonstrates how to do batch operations using Cloud
+Spanner.
+
+For more information, see the README.rst under /spanner.
+"""
+
+import argparse
+import concurrent.futures
+import time
+
+from google.cloud import spanner
+
+
+# [START spanner_batch_client]
+def run_batch_query(instance_id, database_id):
+ """Runs an example batch query."""
+
+ # Expected Table Format:
+ # CREATE TABLE Singers (
+ # SingerId INT64 NOT NULL,
+ # FirstName STRING(1024),
+ # LastName STRING(1024),
+ # SingerInfo BYTES(MAX),
+ # ) PRIMARY KEY (SingerId);
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ # Create the batch transaction and generate partitions
+ snapshot = database.batch_snapshot()
+ partitions = snapshot.generate_read_batches(
+ table="Singers",
+ columns=("SingerId", "FirstName", "LastName"),
+ keyset=spanner.KeySet(all_=True),
+ )
+
+ # Create a pool of workers for the tasks
+ start = time.time()
+ with concurrent.futures.ThreadPoolExecutor() as executor:
+ futures = [executor.submit(process, snapshot, p) for p in partitions]
+
+ for future in concurrent.futures.as_completed(futures, timeout=3600):
+ finish, row_ct = future.result()
+ elapsed = finish - start
+ print(u"Completed {} rows in {} seconds".format(row_ct, elapsed))
+
+ # Clean up
+ snapshot.close()
+
+
+def process(snapshot, partition):
+ """Processes the requests of a query in an separate process."""
+ print("Started processing partition.")
+ row_ct = 0
+ for row in snapshot.process_read_batch(partition):
+ print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))
+ row_ct += 1
+ return time.time(), row_ct
+
+
+# [END spanner_batch_client]
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+ parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.")
+ parser.add_argument(
+ "database_id", help="Your Cloud Spanner database ID.", default="example_db"
+ )
+
+ args = parser.parse_args()
+
+ run_batch_query(args.instance_id, args.database_id)
diff --git a/samples/samples/noxfile.py b/samples/samples/noxfile.py
new file mode 100644
index 0000000000..5660f08be4
--- /dev/null
+++ b/samples/samples/noxfile.py
@@ -0,0 +1,222 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+from pathlib import Path
+import sys
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+ # You can opt out from the test for specific Python versions.
+ "ignored_versions": ["2.7"],
+ # An envvar key for determining the project id to use. Change it
+ # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+ # build specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+ # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ "envs": {},
+}
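+# For illustration, a hypothetical `noxfile_config.py` override could look
+# like this (keys mirror `TEST_CONFIG` above; the values are placeholders):
+#
+#     TEST_CONFIG_OVERRIDE = {
+#         "ignored_versions": ["2.7", "3.6"],
+#         "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
+#         "envs": {"SPANNER_INSTANCE": "my-test-instance"},
+#     }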
+
+
+try:
+ # Ensure we can import noxfile_config in the project's directory.
+ sys.path.append(".")
+ from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+ print("No user noxfile_config found: detail: {}".format(e))
+ TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
+
+
+def get_pytest_env_vars():
+ """Returns a dict for pytest invocation."""
+ ret = {}
+
+ # Override the GCLOUD_PROJECT and the alias.
+ env_key = TEST_CONFIG["gcloud_project_env"]
+ # This should error out if not set.
+ ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
+
+ # Apply user supplied envs.
+ ret.update(TEST_CONFIG["envs"])
+ return ret
+
+
+# DO NOT EDIT - automatically generated.
+# All versions used to test samples.
+ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"]
+
+# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
+
+TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
+
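+# Note: any non-empty value of this variable (even the string "False")
+# enables installing from source, since non-empty strings are truthy.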
+INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+#
+# Style Checks
+#
+
+
+def _determine_local_import_names(start_dir):
+ """Determines all import names that should be considered "local".
+
+ This is used when running the linter to insure that import order is
+ properly checked.
+ """
+ file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
+ return [
+ basename
+ for basename, extension in file_ext_pairs
+ if extension == ".py"
+ or os.path.isdir(os.path.join(start_dir, basename))
+        and basename not in ("__pycache__",)
+ ]
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+# E203: whitespace before ‘:’
+# E266: too many leading ‘#’ for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+ "--show-source",
+ "--builtin=gettext",
+ "--max-complexity=20",
+ "--import-order-style=google",
+ "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+ "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+ "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session):
+ session.install("flake8", "flake8-import-order")
+
+ local_names = _determine_local_import_names(".")
+ args = FLAKE8_COMMON_ARGS + [
+ "--application-import-names",
+ ",".join(local_names),
+ ".",
+ ]
+ session.run("flake8", *args)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(session, post_install=None):
+ """Runs py.test for a particular project."""
+ if os.path.exists("requirements.txt"):
+ session.install("-r", "requirements.txt")
+
+ if os.path.exists("requirements-test.txt"):
+ session.install("-r", "requirements-test.txt")
+
+ if INSTALL_LIBRARY_FROM_SOURCE:
+ session.install("-e", _get_repo_root())
+
+ if post_install:
+ post_install(session)
+
+ session.run(
+ "pytest",
+ *(PYTEST_COMMON_ARGS + session.posargs),
+ # Pytest will return 5 when no tests are collected. This can happen
+ # on travis where slow and flaky tests are excluded.
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+ success_codes=[0, 5],
+ env=get_pytest_env_vars()
+ )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session):
+ """Runs py.test for a sample using the specified version of Python."""
+ if session.python in TESTED_VERSIONS:
+ _session_tests(session)
+ else:
+ session.skip(
+ "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ )
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root():
+ """ Returns the root folder of the project. """
+    # Get root of this repository. Assume we don't have directories nested deeper than 10 levels.
+ p = Path(os.getcwd())
+ for i in range(10):
+ if p is None:
+ break
+ if Path(p / ".git").exists():
+ return str(p)
+ p = p.parent
+ raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session, path):
+ """(Re-)generates the readme for a sample."""
+ session.install("jinja2", "pyyaml")
+ dir_ = os.path.dirname(path)
+
+ if os.path.exists(os.path.join(dir_, "requirements.txt")):
+ session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+ in_file = os.path.join(dir_, "README.rst.in")
+ session.run(
+ "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+ )
diff --git a/samples/samples/quickstart.py b/samples/samples/quickstart.py
new file mode 100644
index 0000000000..f19c5f48b2
--- /dev/null
+++ b/samples/samples/quickstart.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def run_quickstart():
+ # [START spanner_quickstart]
+ # Imports the Google Cloud Client Library.
+ from google.cloud import spanner
+
+ # Instantiate a client.
+ spanner_client = spanner.Client()
+
+ # Your Cloud Spanner instance ID.
+ instance_id = "my-instance-id"
+
+ # Get a Cloud Spanner instance by ID.
+ instance = spanner_client.instance(instance_id)
+
+ # Your Cloud Spanner database ID.
+ database_id = "my-database-id"
+
+ # Get a Cloud Spanner database by ID.
+ database = instance.database(database_id)
+
+ # Execute a simple SQL statement.
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql("SELECT 1")
+
+ for row in results:
+ print(row)
+ # [END spanner_quickstart]
+
+
+if __name__ == "__main__":
+ run_quickstart()
diff --git a/samples/samples/quickstart_test.py b/samples/samples/quickstart_test.py
new file mode 100644
index 0000000000..d5c8d04160
--- /dev/null
+++ b/samples/samples/quickstart_test.py
@@ -0,0 +1,56 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from google.cloud import spanner
+import mock
+import pytest
+
+import quickstart
+
+SPANNER_INSTANCE = os.environ["SPANNER_INSTANCE"]
+
+
+@pytest.fixture
+def patch_instance():
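+    """Redirects the quickstart's hard-coded instance ID to the test
+    instance named by the SPANNER_INSTANCE environment variable."""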
+ original_instance = spanner.Client.instance
+
+ def new_instance(self, unused_instance_name):
+ return original_instance(self, SPANNER_INSTANCE)
+
+ instance_patch = mock.patch(
+ "google.cloud.spanner.Client.instance", side_effect=new_instance, autospec=True
+ )
+
+ with instance_patch:
+ yield
+
+
+@pytest.fixture
+def example_database():
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(SPANNER_INSTANCE)
+ database = instance.database("my-database-id")
+
+ if not database.exists():
+ database.create()
+
+ yield
+
+
+def test_quickstart(capsys, patch_instance, example_database):
+ quickstart.run_quickstart()
+ out, _ = capsys.readouterr()
+ assert "[1]" in out
diff --git a/samples/samples/requirements-test.txt b/samples/samples/requirements-test.txt
new file mode 100644
index 0000000000..676ff949e8
--- /dev/null
+++ b/samples/samples/requirements-test.txt
@@ -0,0 +1,2 @@
+pytest==5.4.3
+mock==4.0.2
diff --git a/samples/samples/requirements.txt b/samples/samples/requirements.txt
new file mode 100644
index 0000000000..5470bcdf5b
--- /dev/null
+++ b/samples/samples/requirements.txt
@@ -0,0 +1,2 @@
+google-cloud-spanner==1.17.1
+futures==3.3.0; python_version < "3"
diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py
new file mode 100644
index 0000000000..1a2c8d60e6
--- /dev/null
+++ b/samples/samples/snippets.py
@@ -0,0 +1,1779 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This application demonstrates how to do basic operations using Cloud
+Spanner.
+
+For more information, see the README.rst under /spanner.
+"""
+
+import argparse
+import base64
+import datetime
+
+from google.cloud import spanner
+from google.cloud.spanner_v1 import param_types
+
+
+# [START spanner_create_instance]
+def create_instance(instance_id):
+ """Creates an instance."""
+ spanner_client = spanner.Client()
+
+ config_name = "{}/instanceConfigs/regional-us-central1".format(
+ spanner_client.project_name
+ )
+
+ instance = spanner_client.instance(
+ instance_id,
+ configuration_name=config_name,
+ display_name="This is a display name.",
+ node_count=1,
+ )
+
+ operation = instance.create()
+
+ print("Waiting for operation to complete...")
+ operation.result(120)
+
+ print("Created instance {}".format(instance_id))
+
+
+# [END spanner_create_instance]
+
+
+# [START spanner_create_database]
+def create_database(instance_id, database_id):
+ """Creates a database and tables for sample data."""
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+ database = instance.database(
+ database_id,
+ ddl_statements=[
+ """CREATE TABLE Singers (
+ SingerId INT64 NOT NULL,
+ FirstName STRING(1024),
+ LastName STRING(1024),
+ SingerInfo BYTES(MAX)
+ ) PRIMARY KEY (SingerId)""",
+ """CREATE TABLE Albums (
+ SingerId INT64 NOT NULL,
+ AlbumId INT64 NOT NULL,
+ AlbumTitle STRING(MAX)
+ ) PRIMARY KEY (SingerId, AlbumId),
+ INTERLEAVE IN PARENT Singers ON DELETE CASCADE""",
+ ],
+ )
+
+ operation = database.create()
+
+ print("Waiting for operation to complete...")
+ operation.result(120)
+
+ print("Created database {} on instance {}".format(database_id, instance_id))
+
+
+# [END spanner_create_database]
+
+
+# [START spanner_insert_data]
+def insert_data(instance_id, database_id):
+ """Inserts sample data into the given database.
+
+ The database and table must already exist and can be created using
+ `create_database`.
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.batch() as batch:
+ batch.insert(
+ table="Singers",
+ columns=("SingerId", "FirstName", "LastName"),
+ values=[
+ (1, u"Marc", u"Richards"),
+ (2, u"Catalina", u"Smith"),
+ (3, u"Alice", u"Trentor"),
+ (4, u"Lea", u"Martin"),
+ (5, u"David", u"Lomond"),
+ ],
+ )
+
+ batch.insert(
+ table="Albums",
+ columns=("SingerId", "AlbumId", "AlbumTitle"),
+ values=[
+ (1, 1, u"Total Junk"),
+ (1, 2, u"Go, Go, Go"),
+ (2, 1, u"Green"),
+ (2, 2, u"Forever Hold Your Peace"),
+ (2, 3, u"Terrified"),
+ ],
+ )
+
+ print("Inserted data.")
+
+
+# [END spanner_insert_data]
+
+
+# [START spanner_delete_data]
+def delete_data(instance_id, database_id):
+ """Deletes sample data from the given database.
+
+ The database, table, and data must already exist and can be created using
+ `create_database` and `insert_data`.
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ # Delete individual rows
+ albums_to_delete = spanner.KeySet(keys=[[2, 1], [2, 3]])
+
+ # Delete a range of rows where the column key is >=3 and <5
+ singers_range = spanner.KeyRange(start_closed=[3], end_open=[5])
+ singers_to_delete = spanner.KeySet(ranges=[singers_range])
+
+ # Delete remaining Singers rows, which will also delete the remaining
+ # Albums rows because Albums was defined with ON DELETE CASCADE
+ remaining_singers = spanner.KeySet(all_=True)
+
+ with database.batch() as batch:
+ batch.delete("Albums", albums_to_delete)
+ batch.delete("Singers", singers_to_delete)
+ batch.delete("Singers", remaining_singers)
+
+ print("Deleted data.")
+
+
+# [END spanner_delete_data]
+
+
+# [START spanner_query_data]
+def query_data(instance_id, database_id):
+ """Queries sample data from the database using SQL."""
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT SingerId, AlbumId, AlbumTitle FROM Albums"
+ )
+
+ for row in results:
+ print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))
+
+
+# [END spanner_query_data]
+
+
+# [START spanner_read_data]
+def read_data(instance_id, database_id):
+ """Reads sample data from the database."""
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ keyset = spanner.KeySet(all_=True)
+ results = snapshot.read(
+ table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset
+ )
+
+ for row in results:
+ print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))
+
+
+# [END spanner_read_data]
+
+
+# [START spanner_read_stale_data]
+def read_stale_data(instance_id, database_id):
+ """Reads sample data from the database. The data is exactly 15 seconds
+ stale."""
+ import datetime
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
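+    # An exact-staleness bound: reads are served from the database state
+    # as of exactly 15 seconds before the read timestamp.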
+ staleness = datetime.timedelta(seconds=15)
+
+ with database.snapshot(exact_staleness=staleness) as snapshot:
+ keyset = spanner.KeySet(all_=True)
+ results = snapshot.read(
+ table="Albums",
+ columns=("SingerId", "AlbumId", "MarketingBudget"),
+ keyset=keyset,
+ )
+
+ for row in results:
+ print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row))
+
+
+# [END spanner_read_stale_data]
+
+
+# [START spanner_query_data_with_new_column]
+def query_data_with_new_column(instance_id, database_id):
+ """Queries sample data from the database using SQL.
+
+ This sample uses the `MarketingBudget` column. You can add the column
+ by running the `add_column` sample or by running this DDL statement against
+ your database:
+
+ ALTER TABLE Albums ADD COLUMN MarketingBudget INT64
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT SingerId, AlbumId, MarketingBudget FROM Albums"
+ )
+
+ for row in results:
+ print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row))
+
+
+# [END spanner_query_data_with_new_column]
+
+
+# [START spanner_create_index]
+def add_index(instance_id, database_id):
+ """Adds a simple index to the example database."""
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ operation = database.update_ddl(
+ ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"]
+ )
+
+ print("Waiting for operation to complete...")
+ operation.result(120)
+
+ print("Added the AlbumsByAlbumTitle index.")
+
+
+# [END spanner_create_index]
+
+
+# [START spanner_query_data_with_index]
+def query_data_with_index(
+ instance_id, database_id, start_title="Aardvark", end_title="Goo"
+):
+ """Queries sample data from the database using SQL and an index.
+
+ The index must exist before running this sample. You can add the index
+ by running the `add_index` sample or by running this DDL statement against
+ your database:
+
+ CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)
+
+ This sample also uses the `MarketingBudget` column. You can add the column
+ by running the `add_column` sample or by running this DDL statement against
+ your database:
+
+ ALTER TABLE Albums ADD COLUMN MarketingBudget INT64
+
+ """
+ from google.cloud.spanner_v1.proto import type_pb2
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ params = {"start_title": start_title, "end_title": end_title}
+ param_types = {
+ "start_title": type_pb2.Type(code=type_pb2.STRING),
+ "end_title": type_pb2.Type(code=type_pb2.STRING),
+ }
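+    # Note: this local `param_types` dict shadows the module-level
+    # `param_types` import; the snippet builds the proto types explicitly
+    # so the region stays self-contained.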
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT AlbumId, AlbumTitle, MarketingBudget "
+ "FROM Albums@{FORCE_INDEX=AlbumsByAlbumTitle} "
+ "WHERE AlbumTitle >= @start_title AND AlbumTitle < @end_title",
+ params=params,
+ param_types=param_types,
+ )
+
+ for row in results:
+ print(u"AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row))
+
+
+# [END spanner_query_data_with_index]
+
+
+# [START spanner_read_data_with_index]
+def read_data_with_index(instance_id, database_id):
+ """Reads sample data from the database using an index.
+
+ The index must exist before running this sample. You can add the index
+ by running the `add_index` sample or by running this DDL statement against
+ your database:
+
+ CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)
+
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ keyset = spanner.KeySet(all_=True)
+ results = snapshot.read(
+ table="Albums",
+ columns=("AlbumId", "AlbumTitle"),
+ keyset=keyset,
+ index="AlbumsByAlbumTitle",
+ )
+
+ for row in results:
+ print("AlbumId: {}, AlbumTitle: {}".format(*row))
+
+
+# [END spanner_read_data_with_index]
+
+
+# [START spanner_create_storing_index]
+def add_storing_index(instance_id, database_id):
+ """Adds an storing index to the example database."""
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ operation = database.update_ddl(
+ [
+ "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)"
+ "STORING (MarketingBudget)"
+ ]
+ )
+
+ print("Waiting for operation to complete...")
+ operation.result(120)
+
+ print("Added the AlbumsByAlbumTitle2 index.")
+
+
+# [END spanner_create_storing_index]
+
+
+# [START spanner_read_data_with_storing_index]
+def read_data_with_storing_index(instance_id, database_id):
+ """Reads sample data from the database using an index with a storing
+ clause.
+
+ The index must exist before running this sample. You can add the index
+    by running the `add_storing_index` sample or by running this DDL statement
+ against your database:
+
+ CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)
+ STORING (MarketingBudget)
+
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ keyset = spanner.KeySet(all_=True)
+ results = snapshot.read(
+ table="Albums",
+ columns=("AlbumId", "AlbumTitle", "MarketingBudget"),
+ keyset=keyset,
+ index="AlbumsByAlbumTitle2",
+ )
+
+ for row in results:
+ print(u"AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row))
+
+
+# [END spanner_read_data_with_storing_index]
+
+
+# [START spanner_add_column]
+def add_column(instance_id, database_id):
+ """Adds a new column to the Albums table in the example database."""
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ operation = database.update_ddl(
+ ["ALTER TABLE Albums ADD COLUMN MarketingBudget INT64"]
+ )
+
+ print("Waiting for operation to complete...")
+ operation.result(120)
+
+ print("Added the MarketingBudget column.")
+
+
+# [END spanner_add_column]
+
+
+# [START spanner_update_data]
+def update_data(instance_id, database_id):
+ """Updates sample data in the database.
+
+ This updates the `MarketingBudget` column which must be created before
+ running this sample. You can add the column by running the `add_column`
+ sample or by running this DDL statement against your database:
+
+ ALTER TABLE Albums ADD COLUMN MarketingBudget INT64
+
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.batch() as batch:
+ batch.update(
+ table="Albums",
+ columns=("SingerId", "AlbumId", "MarketingBudget"),
+ values=[(1, 1, 100000), (2, 2, 500000)],
+ )
+
+ print("Updated data.")
+
+
+# [END spanner_update_data]
+
+
+# [START spanner_read_write_transaction]
+def read_write_transaction(instance_id, database_id):
+ """Performs a read-write transaction to update two sample records in the
+ database.
+
+ This will transfer 200,000 from the `MarketingBudget` field for the second
+ Album to the first Album. If the `MarketingBudget` is too low, it will
+ raise an exception.
+
+ Before running this sample, you will need to run the `update_data` sample
+ to populate the fields.
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ def update_albums(transaction):
+        # Read the second album's budget.
+ second_album_keyset = spanner.KeySet(keys=[(2, 2)])
+ second_album_result = transaction.read(
+ table="Albums",
+ columns=("MarketingBudget",),
+ keyset=second_album_keyset,
+ limit=1,
+ )
+ second_album_row = list(second_album_result)[0]
+ second_album_budget = second_album_row[0]
+
+ transfer_amount = 200000
+
+ if second_album_budget < transfer_amount:
+ # Raising an exception will automatically roll back the
+ # transaction.
+ raise ValueError("The second album doesn't have enough funds to transfer")
+
+ # Read the first album's budget.
+ first_album_keyset = spanner.KeySet(keys=[(1, 1)])
+ first_album_result = transaction.read(
+ table="Albums",
+ columns=("MarketingBudget",),
+ keyset=first_album_keyset,
+ limit=1,
+ )
+ first_album_row = list(first_album_result)[0]
+ first_album_budget = first_album_row[0]
+
+ # Update the budgets.
+ second_album_budget -= transfer_amount
+ first_album_budget += transfer_amount
+ print(
+ "Setting first album's budget to {} and the second album's "
+ "budget to {}.".format(first_album_budget, second_album_budget)
+ )
+
+ # Update the rows.
+ transaction.update(
+ table="Albums",
+ columns=("SingerId", "AlbumId", "MarketingBudget"),
+ values=[(1, 1, first_album_budget), (2, 2, second_album_budget)],
+ )
+
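+    # run_in_transaction commits when update_albums returns, and retries it
+    # automatically if Cloud Spanner aborts the transaction.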
+ database.run_in_transaction(update_albums)
+
+ print("Transaction complete.")
+
+
+# [END spanner_read_write_transaction]
+
+
+# [START spanner_read_only_transaction]
+def read_only_transaction(instance_id, database_id):
+ """Reads data inside of a read-only transaction.
+
+    Within the read-only transaction, or "snapshot", the application sees a
+    consistent view of the database at a particular timestamp.
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
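+    # multi_use=True allows several reads from the same snapshot; each read
+    # sees the database as of the same timestamp.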
+ with database.snapshot(multi_use=True) as snapshot:
+ # Read using SQL.
+ results = snapshot.execute_sql(
+ "SELECT SingerId, AlbumId, AlbumTitle FROM Albums"
+ )
+
+ print("Results from first read:")
+ for row in results:
+ print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))
+
+ # Perform another read using the `read` method. Even if the data
+ # is updated in-between the reads, the snapshot ensures that both
+ # return the same data.
+ keyset = spanner.KeySet(all_=True)
+ results = snapshot.read(
+ table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset
+ )
+
+ print("Results from second read:")
+ for row in results:
+ print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))
+
+
+# [END spanner_read_only_transaction]
+
+
+# [START spanner_create_table_with_timestamp_column]
+def create_table_with_timestamp(instance_id, database_id):
+ """Creates a table with a COMMIT_TIMESTAMP column."""
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ operation = database.update_ddl(
+ [
+ """CREATE TABLE Performances (
+ SingerId INT64 NOT NULL,
+ VenueId INT64 NOT NULL,
+ EventDate Date,
+ Revenue INT64,
+ LastUpdateTime TIMESTAMP NOT NULL
+ OPTIONS(allow_commit_timestamp=true)
+ ) PRIMARY KEY (SingerId, VenueId, EventDate),
+ INTERLEAVE IN PARENT Singers ON DELETE CASCADE"""
+ ]
+ )
+
+ print("Waiting for operation to complete...")
+ operation.result(120)
+
+ print(
+ "Created Performances table on database {} on instance {}".format(
+ database_id, instance_id
+ )
+ )
+
+
+# [END spanner_create_table_with_timestamp_column]
+
+
+# [START spanner_insert_data_with_timestamp_column]
+def insert_data_with_timestamp(instance_id, database_id):
+ """Inserts data with a COMMIT_TIMESTAMP field into a table. """
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+ database = instance.database(database_id)
+
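+    # spanner.COMMIT_TIMESTAMP is a sentinel value; Cloud Spanner replaces it
+    # with the transaction's commit timestamp when the batch commits.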
+ with database.batch() as batch:
+ batch.insert(
+ table="Performances",
+ columns=("SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"),
+ values=[
+ (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP),
+ (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP),
+ (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP),
+ ],
+ )
+
+ print("Inserted data.")
+
+
+# [END spanner_insert_data_with_timestamp_column]
+
+
+# [START spanner_add_timestamp_column]
+def add_timestamp_column(instance_id, database_id):
+ """ Adds a new TIMESTAMP column to the Albums table in the example database.
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+ database = instance.database(database_id)
+
+ operation = database.update_ddl(
+ [
+ "ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP "
+ "OPTIONS(allow_commit_timestamp=true)"
+ ]
+ )
+
+ print("Waiting for operation to complete...")
+ operation.result(120)
+
+ print(
+ 'Altered table "Albums" on database {} on instance {}.'.format(
+ database_id, instance_id
+ )
+ )
+
+
+# [END spanner_add_timestamp_column]
+
+
+# [START spanner_update_data_with_timestamp_column]
+def update_data_with_timestamp(instance_id, database_id):
+ """Updates Performances tables in the database with the COMMIT_TIMESTAMP
+ column.
+
+ This updates the `MarketingBudget` column which must be created before
+ running this sample. You can add the column by running the `add_column`
+ sample or by running this DDL statement against your database:
+
+ ALTER TABLE Albums ADD COLUMN MarketingBudget INT64
+
+ In addition this update expects the LastUpdateTime column added by
+ applying this DDL statement against your database:
+
+ ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP
+ OPTIONS(allow_commit_timestamp=true)
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+ database = instance.database(database_id)
+
+ with database.batch() as batch:
+ batch.update(
+ table="Albums",
+ columns=("SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"),
+ values=[
+ (1, 1, 1000000, spanner.COMMIT_TIMESTAMP),
+ (2, 2, 750000, spanner.COMMIT_TIMESTAMP),
+ ],
+ )
+
+ print("Updated data.")
+
+
+# [END spanner_update_data_with_timestamp_column]
+
+
+# [START spanner_query_data_with_timestamp_column]
+def query_data_with_timestamp(instance_id, database_id):
+ """Queries sample data from the database using SQL.
+
+    The query orders results by the `LastUpdateTime` column, which must be
+    added to the Albums table before running this sample. You can add the
+    column by running the `add_timestamp_column` sample or by running this
+    DDL statement against your database:
+
+        ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP
+        OPTIONS (allow_commit_timestamp=true)
+
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT SingerId, AlbumId, MarketingBudget FROM Albums "
+ "ORDER BY LastUpdateTime DESC"
+ )
+
+ for row in results:
+ print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row))
+
+
+# [END spanner_query_data_with_timestamp_column]
+
+
+# [START spanner_write_data_for_struct_queries]
+def write_struct_data(instance_id, database_id):
+ """Inserts sample data that can be used to test STRUCT parameters
+ in queries.
+ """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.batch() as batch:
+ batch.insert(
+ table="Singers",
+ columns=("SingerId", "FirstName", "LastName"),
+ values=[
+ (6, u"Elena", u"Campbell"),
+ (7, u"Gabriel", u"Wright"),
+ (8, u"Benjamin", u"Martinez"),
+ (9, u"Hannah", u"Harris"),
+ ],
+ )
+
+ print("Inserted sample data for STRUCT queries")
+
+
+# [END spanner_write_data_for_struct_queries]
+
+
+def query_with_struct(instance_id, database_id):
+ """Query a table using STRUCT parameters. """
+ # [START spanner_create_struct_with_data]
+ record_type = param_types.Struct(
+ [
+ param_types.StructField("FirstName", param_types.STRING),
+ param_types.StructField("LastName", param_types.STRING),
+ ]
+ )
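+    # The tuple below supplies the STRUCT's field values in declaration order.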
+ record_value = ("Elena", "Campbell")
+ # [END spanner_create_struct_with_data]
+
+ # [START spanner_query_data_with_struct]
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT SingerId FROM Singers WHERE " "(FirstName, LastName) = @name",
+ params={"name": record_value},
+ param_types={"name": record_type},
+ )
+
+ for row in results:
+ print(u"SingerId: {}".format(*row))
+ # [END spanner_query_data_with_struct]
+
+
+def query_with_array_of_struct(instance_id, database_id):
+ """Query a table using an array of STRUCT parameters. """
+ # [START spanner_create_user_defined_struct]
+ name_type = param_types.Struct(
+ [
+ param_types.StructField("FirstName", param_types.STRING),
+ param_types.StructField("LastName", param_types.STRING),
+ ]
+ )
+ # [END spanner_create_user_defined_struct]
+
+ # [START spanner_create_array_of_struct_with_data]
+ band_members = [
+ ("Elena", "Campbell"),
+ ("Gabriel", "Wright"),
+ ("Benjamin", "Martinez"),
+ ]
+ # [END spanner_create_array_of_struct_with_data]
+
+ # [START spanner_query_data_with_array_of_struct]
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT SingerId FROM Singers WHERE "
+ "STRUCT
"
+ "(FirstName, LastName) IN UNNEST(@names)",
+ params={"names": band_members},
+ param_types={"names": param_types.Array(name_type)},
+ )
+
+ for row in results:
+ print(u"SingerId: {}".format(*row))
+ # [END spanner_query_data_with_array_of_struct]
+
+
+# [START spanner_field_access_on_struct_parameters]
+def query_struct_field(instance_id, database_id):
+ """Query a table using field access on a STRUCT parameter. """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ name_type = param_types.Struct(
+ [
+ param_types.StructField("FirstName", param_types.STRING),
+ param_types.StructField("LastName", param_types.STRING),
+ ]
+ )
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT SingerId FROM Singers " "WHERE FirstName = @name.FirstName",
+ params={"name": ("Elena", "Campbell")},
+ param_types={"name": name_type},
+ )
+
+ for row in results:
+ print(u"SingerId: {}".format(*row))
+
+
+# [END spanner_field_access_on_struct_parameters]
+
+
+# [START spanner_field_access_on_nested_struct_parameters]
+def query_nested_struct_field(instance_id, database_id):
+ """Query a table using nested field access on a STRUCT parameter. """
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ song_info_type = param_types.Struct(
+ [
+ param_types.StructField("SongName", param_types.STRING),
+ param_types.StructField(
+ "ArtistNames",
+ param_types.Array(
+ param_types.Struct(
+ [
+ param_types.StructField("FirstName", param_types.STRING),
+ param_types.StructField("LastName", param_types.STRING),
+ ]
+ )
+ ),
+ ),
+ ]
+ )
+
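+    # The value mirrors the declared type: a SongName string plus a list of
+    # (FirstName, LastName) tuples.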
+ song_info = ("Imagination", [("Elena", "Campbell"), ("Hannah", "Harris")])
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT SingerId, @song_info.SongName "
+ "FROM Singers WHERE "
+ "STRUCT"
+ "(FirstName, LastName) "
+ "IN UNNEST(@song_info.ArtistNames)",
+ params={"song_info": song_info},
+ param_types={"song_info": song_info_type},
+ )
+
+ for row in results:
+ print(u"SingerId: {} SongName: {}".format(*row))
+
+
+# [END spanner_field_access_on_nested_struct_parameters]
+
+
+def insert_data_with_dml(instance_id, database_id):
+ """Inserts sample data into the given database using a DML statement. """
+ # [START spanner_dml_standard_insert]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ def insert_singers(transaction):
+ row_ct = transaction.execute_update(
+ "INSERT Singers (SingerId, FirstName, LastName) "
+ " VALUES (10, 'Virginia', 'Watson')"
+ )
+
+ print("{} record(s) inserted.".format(row_ct))
+
+ database.run_in_transaction(insert_singers)
+ # [END spanner_dml_standard_insert]
+
+
+def update_data_with_dml(instance_id, database_id):
+ """Updates sample data from the database using a DML statement. """
+ # [START spanner_dml_standard_update]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ def update_albums(transaction):
+ row_ct = transaction.execute_update(
+ "UPDATE Albums "
+ "SET MarketingBudget = MarketingBudget * 2 "
+ "WHERE SingerId = 1 and AlbumId = 1"
+ )
+
+ print("{} record(s) updated.".format(row_ct))
+
+ database.run_in_transaction(update_albums)
+ # [END spanner_dml_standard_update]
+
+
+def delete_data_with_dml(instance_id, database_id):
+ """Deletes sample data from the database using a DML statement. """
+ # [START spanner_dml_standard_delete]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ def delete_singers(transaction):
+ row_ct = transaction.execute_update(
+ "DELETE FROM Singers WHERE FirstName = 'Alice'"
+ )
+
+ print("{} record(s) deleted.".format(row_ct))
+
+ database.run_in_transaction(delete_singers)
+ # [END spanner_dml_standard_delete]
+
+
+def update_data_with_dml_timestamp(instance_id, database_id):
+ """Updates data with Timestamp from the database using a DML statement. """
+ # [START spanner_dml_standard_update_with_timestamp]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ def update_albums(transaction):
+ row_ct = transaction.execute_update(
+ "UPDATE Albums "
+ "SET LastUpdateTime = PENDING_COMMIT_TIMESTAMP() "
+ "WHERE SingerId = 1"
+ )
+
+ print("{} record(s) updated.".format(row_ct))
+
+ database.run_in_transaction(update_albums)
+ # [END spanner_dml_standard_update_with_timestamp]
+
+
+def dml_write_read_transaction(instance_id, database_id):
+ """First inserts data then reads it from within a transaction using DML."""
+ # [START spanner_dml_write_then_read]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ def write_then_read(transaction):
+ # Insert record.
+ row_ct = transaction.execute_update(
+ "INSERT Singers (SingerId, FirstName, LastName) "
+ " VALUES (11, 'Timothy', 'Campbell')"
+ )
+ print("{} record(s) inserted.".format(row_ct))
+
+ # Read newly inserted record.
+ results = transaction.execute_sql(
+ "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11"
+ )
+ for result in results:
+ print("FirstName: {}, LastName: {}".format(*result))
+
+ database.run_in_transaction(write_then_read)
+ # [END spanner_dml_write_then_read]
+
+
+def update_data_with_dml_struct(instance_id, database_id):
+ """Updates data with a DML statement and STRUCT parameters. """
+ # [START spanner_dml_structs]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ record_type = param_types.Struct(
+ [
+ param_types.StructField("FirstName", param_types.STRING),
+ param_types.StructField("LastName", param_types.STRING),
+ ]
+ )
+ record_value = ("Timothy", "Campbell")
+
+ def write_with_struct(transaction):
+ row_ct = transaction.execute_update(
+ "UPDATE Singers SET LastName = 'Grant' "
+ "WHERE STRUCT"
+ "(FirstName, LastName) = @name",
+ params={"name": record_value},
+ param_types={"name": record_type},
+ )
+ print("{} record(s) updated.".format(row_ct))
+
+ database.run_in_transaction(write_with_struct)
+ # [END spanner_dml_structs]
+
+
+def insert_with_dml(instance_id, database_id):
+ """Inserts data with a DML statement into the database. """
+ # [START spanner_dml_getting_started_insert]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ def insert_singers(transaction):
+ row_ct = transaction.execute_update(
+ "INSERT Singers (SingerId, FirstName, LastName) VALUES "
+ "(12, 'Melissa', 'Garcia'), "
+ "(13, 'Russell', 'Morales'), "
+ "(14, 'Jacqueline', 'Long'), "
+ "(15, 'Dylan', 'Shaw')"
+ )
+ print("{} record(s) inserted.".format(row_ct))
+
+ database.run_in_transaction(insert_singers)
+ # [END spanner_dml_getting_started_insert]
+
+
+def query_data_with_parameter(instance_id, database_id):
+ """Queries sample data from the database using SQL with a parameter."""
+ # [START spanner_query_with_parameter]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT SingerId, FirstName, LastName FROM Singers "
+ "WHERE LastName = @lastName",
+ params={"lastName": "Garcia"},
+ param_types={"lastName": spanner.param_types.STRING},
+ )
+
+ for row in results:
+ print(u"SingerId: {}, FirstName: {}, LastName: {}".format(*row))
+ # [END spanner_query_with_parameter]
+
+
+def write_with_dml_transaction(instance_id, database_id):
+ """ Transfers part of a marketing budget from one album to another. """
+ # [START spanner_dml_getting_started_update]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ def transfer_budget(transaction):
+ # Transfer marketing budget from one album to another. Performed in a
+ # single transaction to ensure that the transfer is atomic.
+ second_album_result = transaction.execute_sql(
+ "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2"
+ )
+ second_album_row = list(second_album_result)[0]
+ second_album_budget = second_album_row[0]
+
+ transfer_amount = 200000
+
+        # The transaction will only be committed if this condition still
+        # holds at the time of commit. Otherwise it will be aborted and the
+        # callable will be rerun by the client library.
+ if second_album_budget >= transfer_amount:
+ first_album_result = transaction.execute_sql(
+ "SELECT MarketingBudget from Albums "
+ "WHERE SingerId = 1 and AlbumId = 1"
+ )
+ first_album_row = list(first_album_result)[0]
+ first_album_budget = first_album_row[0]
+
+ second_album_budget -= transfer_amount
+ first_album_budget += transfer_amount
+
+ # Update first album
+ transaction.execute_update(
+ "UPDATE Albums "
+ "SET MarketingBudget = @AlbumBudget "
+ "WHERE SingerId = 1 and AlbumId = 1",
+ params={"AlbumBudget": first_album_budget},
+ param_types={"AlbumBudget": spanner.param_types.INT64},
+ )
+
+ # Update second album
+ transaction.execute_update(
+ "UPDATE Albums "
+ "SET MarketingBudget = @AlbumBudget "
+ "WHERE SingerId = 2 and AlbumId = 2",
+ params={"AlbumBudget": second_album_budget},
+ param_types={"AlbumBudget": spanner.param_types.INT64},
+ )
+
+ print(
+ "Transferred {} from Album2's budget to Album1's".format(
+ transfer_amount
+ )
+ )
+
+ database.run_in_transaction(transfer_budget)
+ # [END spanner_dml_getting_started_update]
+
+
+def update_data_with_partitioned_dml(instance_id, database_id):
+ """ Update sample data with a partitioned DML statement. """
+ # [START spanner_dml_partitioned_update]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
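+    # Partitioned DML splits the statement across the key space and runs it
+    # per partition; the statement must be idempotent, since partitions may
+    # be retried.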
+ row_ct = database.execute_partitioned_dml(
+ "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1"
+ )
+
+ print("{} records updated.".format(row_ct))
+ # [END spanner_dml_partitioned_update]
+
+
+def delete_data_with_partitioned_dml(instance_id, database_id):
+ """ Delete sample data with a partitioned DML statement. """
+ # [START spanner_dml_partitioned_delete]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ row_ct = database.execute_partitioned_dml("DELETE FROM Singers WHERE SingerId > 10")
+
+ print("{} record(s) deleted.".format(row_ct))
+ # [END spanner_dml_partitioned_delete]
+
+
+def update_with_batch_dml(instance_id, database_id):
+ """Updates sample data in the database using Batch DML. """
+ # [START spanner_dml_batch_update]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ insert_statement = (
+ "INSERT INTO Albums "
+ "(SingerId, AlbumId, AlbumTitle, MarketingBudget) "
+ "VALUES (1, 3, 'Test Album Title', 10000)"
+ )
+
+ update_statement = (
+ "UPDATE Albums "
+ "SET MarketingBudget = MarketingBudget * 2 "
+ "WHERE SingerId = 1 and AlbumId = 3"
+ )
+
+ def update_albums(transaction):
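+        # batch_update returns the overall status of the batch plus a row
+        # count for each statement that executed.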
+        status, row_cts = transaction.batch_update(
+            [insert_statement, update_statement]
+        )
+
+ print("Executed {} SQL statements using Batch DML.".format(len(row_cts)))
+
+ database.run_in_transaction(update_albums)
+ # [END spanner_dml_batch_update]
+
+
+def create_table_with_datatypes(instance_id, database_id):
+ """Creates a table with supported dataypes. """
+ # [START spanner_create_table_with_datatypes]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ operation = database.update_ddl(
+ [
+ """CREATE TABLE Venues (
+ VenueId INT64 NOT NULL,
+ VenueName STRING(100),
+ VenueInfo BYTES(MAX),
+ Capacity INT64,
+            AvailableDates ARRAY<DATE>,
+ LastContactDate DATE,
+ OutdoorVenue BOOL,
+ PopularityScore FLOAT64,
+ LastUpdateTime TIMESTAMP NOT NULL
+ OPTIONS(allow_commit_timestamp=true)
+ ) PRIMARY KEY (VenueId)"""
+ ]
+ )
+
+ print("Waiting for operation to complete...")
+ operation.result(120)
+
+ print(
+ "Created Venues table on database {} on instance {}".format(
+ database_id, instance_id
+ )
+ )
+ # [END spanner_create_table_with_datatypes]
+
+
+def insert_datatypes_data(instance_id, database_id):
+ """Inserts data with supported datatypes into a table. """
+ # [START spanner_insert_datatypes_data]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ exampleBytes1 = base64.b64encode(u"Hello World 1".encode())
+ exampleBytes2 = base64.b64encode(u"Hello World 2".encode())
+ exampleBytes3 = base64.b64encode(u"Hello World 3".encode())
+ available_dates1 = ["2020-12-01", "2020-12-02", "2020-12-03"]
+ available_dates2 = ["2020-11-01", "2020-11-05", "2020-11-15"]
+ available_dates3 = ["2020-10-01", "2020-10-07"]
+ with database.batch() as batch:
+ batch.insert(
+ table="Venues",
+ columns=(
+ "VenueId",
+ "VenueName",
+ "VenueInfo",
+ "Capacity",
+ "AvailableDates",
+ "LastContactDate",
+ "OutdoorVenue",
+ "PopularityScore",
+ "LastUpdateTime",
+ ),
+ values=[
+ (
+ 4,
+ u"Venue 4",
+ exampleBytes1,
+ 1800,
+ available_dates1,
+ "2018-09-02",
+ False,
+ 0.85543,
+ spanner.COMMIT_TIMESTAMP,
+ ),
+ (
+ 19,
+ u"Venue 19",
+ exampleBytes2,
+ 6300,
+ available_dates2,
+ "2019-01-15",
+ True,
+ 0.98716,
+ spanner.COMMIT_TIMESTAMP,
+ ),
+ (
+ 42,
+ u"Venue 42",
+ exampleBytes3,
+ 3000,
+ available_dates3,
+ "2018-10-01",
+ False,
+ 0.72598,
+ spanner.COMMIT_TIMESTAMP,
+ ),
+ ],
+ )
+
+ print("Inserted data.")
+ # [END spanner_insert_datatypes_data]
+
+
+def query_data_with_array(instance_id, database_id):
+ """Queries sample data using SQL with an ARRAY parameter. """
+ # [START spanner_query_with_array_parameter]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ exampleArray = ["2020-10-01", "2020-11-01"]
+ param = {"available_dates": exampleArray}
+ param_type = {"available_dates": param_types.Array(param_types.DATE)}
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName, AvailableDate FROM Venues v,"
+ "UNNEST(v.AvailableDates) as AvailableDate "
+ "WHERE AvailableDate in UNNEST(@available_dates)",
+ params=param,
+ param_types=param_type,
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}, AvailableDate: {}".format(*row))
+ # [END spanner_query_with_array_parameter]
+
+
+def query_data_with_bool(instance_id, database_id):
+ """Queries sample data using SQL with a BOOL parameter. """
+ # [START spanner_query_with_bool_parameter]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ exampleBool = True
+ param = {"outdoor_venue": exampleBool}
+ param_type = {"outdoor_venue": param_types.BOOL}
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName, OutdoorVenue FROM Venues "
+ "WHERE OutdoorVenue = @outdoor_venue",
+ params=param,
+ param_types=param_type,
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}, OutdoorVenue: {}".format(*row))
+ # [END spanner_query_with_bool_parameter]
+
+
+def query_data_with_bytes(instance_id, database_id):
+ """Queries sample data using SQL with a BYTES parameter. """
+ # [START spanner_query_with_bytes_parameter]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ exampleBytes = base64.b64encode(u"Hello World 1".encode())
+ param = {"venue_info": exampleBytes}
+ param_type = {"venue_info": param_types.BYTES}
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = @venue_info",
+ params=param,
+ param_types=param_type,
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}".format(*row))
+ # [END spanner_query_with_bytes_parameter]
+
+
+def query_data_with_date(instance_id, database_id):
+ """Queries sample data using SQL with a DATE parameter. """
+ # [START spanner_query_with_date_parameter]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ exampleDate = "2019-01-01"
+ param = {"last_contact_date": exampleDate}
+ param_type = {"last_contact_date": param_types.DATE}
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName, LastContactDate FROM Venues "
+ "WHERE LastContactDate < @last_contact_date",
+ params=param,
+ param_types=param_type,
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}, LastContactDate: {}".format(*row))
+ # [END spanner_query_with_date_parameter]
+
+
+def query_data_with_float(instance_id, database_id):
+ """Queries sample data using SQL with a FLOAT64 parameter. """
+ # [START spanner_query_with_float_parameter]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ exampleFloat = 0.8
+ param = {"popularity_score": exampleFloat}
+ param_type = {"popularity_score": param_types.FLOAT64}
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName, PopularityScore FROM Venues "
+ "WHERE PopularityScore > @popularity_score",
+ params=param,
+ param_types=param_type,
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row))
+ # [END spanner_query_with_float_parameter]
+
+
+def query_data_with_int(instance_id, database_id):
+ """Queries sample data using SQL with a INT64 parameter. """
+ # [START spanner_query_with_int_parameter]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ exampleInt = 3000
+ param = {"capacity": exampleInt}
+ param_type = {"capacity": param_types.INT64}
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName, Capacity FROM Venues "
+ "WHERE Capacity >= @capacity",
+ params=param,
+ param_types=param_type,
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}, Capacity: {}".format(*row))
+ # [END spanner_query_with_int_parameter]
+
+
+def query_data_with_string(instance_id, database_id):
+ """Queries sample data using SQL with a STRING parameter. """
+ # [START spanner_query_with_string_parameter]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ exampleString = "Venue 42"
+ param = {"venue_name": exampleString}
+ param_type = {"venue_name": param_types.STRING}
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = @venue_name",
+ params=param,
+ param_types=param_type,
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}".format(*row))
+ # [END spanner_query_with_string_parameter]
+
+
+def query_data_with_timestamp_parameter(instance_id, database_id):
+ """Queries sample data using SQL with a TIMESTAMP parameter. """
+ # [START spanner_query_with_timestamp_parameter]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
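+    # Build an RFC 3339 timestamp string; the trailing "Z" marks it as UTC.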
+ example_timestamp = datetime.datetime.utcnow().isoformat() + "Z"
+ param = {"last_update_time": example_timestamp}
+ param_type = {"last_update_time": param_types.TIMESTAMP}
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName, LastUpdateTime FROM Venues "
+ "WHERE LastUpdateTime < @last_update_time",
+ params=param,
+ param_types=param_type,
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row))
+ # [END spanner_query_with_timestamp_parameter]
+
+
+def query_data_with_query_options(instance_id, database_id):
+ """Queries sample data using SQL with query options."""
+ # [START spanner_query_with_query_options]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
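+    # The query_options argument applies to this statement only; here it
+    # pins version 1 of the SQL optimizer.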
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName, LastUpdateTime FROM Venues",
+ query_options={"optimizer_version": "1"},
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row))
+ # [END spanner_query_with_query_options]
+
+
+def create_client_with_query_options(instance_id, database_id):
+ """Create a client with query options."""
+ # [START spanner_create_client_with_query_options]
+ # instance_id = "your-spanner-instance"
+ # database_id = "your-spanner-db-id"
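+    # query_options passed to the Client become the default for every query
+    # made through it.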
+ spanner_client = spanner.Client(query_options={"optimizer_version": "1"})
+ instance = spanner_client.instance(instance_id)
+ database = instance.database(database_id)
+
+ with database.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT VenueId, VenueName, LastUpdateTime FROM Venues"
+ )
+
+ for row in results:
+ print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row))
+ # [END spanner_create_client_with_query_options]
+
+
+if __name__ == "__main__": # noqa: C901
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+ parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.")
+ parser.add_argument(
+ "--database-id", help="Your Cloud Spanner database ID.", default="example_db"
+ )
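+    # Example invocation (the instance and database IDs are placeholders):
+    #   python snippets.py my-instance --database-id example_db insert_data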
+
+ subparsers = parser.add_subparsers(dest="command")
+ subparsers.add_parser("create_instance", help=create_instance.__doc__)
+ subparsers.add_parser("create_database", help=create_database.__doc__)
+ subparsers.add_parser("insert_data", help=insert_data.__doc__)
+ subparsers.add_parser("delete_data", help=delete_data.__doc__)
+ subparsers.add_parser("query_data", help=query_data.__doc__)
+ subparsers.add_parser("read_data", help=read_data.__doc__)
+ subparsers.add_parser("read_stale_data", help=read_stale_data.__doc__)
+ subparsers.add_parser("add_column", help=add_column.__doc__)
+ subparsers.add_parser("update_data", help=update_data.__doc__)
+ subparsers.add_parser(
+ "query_data_with_new_column", help=query_data_with_new_column.__doc__
+ )
+ subparsers.add_parser("read_write_transaction", help=read_write_transaction.__doc__)
+ subparsers.add_parser("read_only_transaction", help=read_only_transaction.__doc__)
+ subparsers.add_parser("add_index", help=add_index.__doc__)
+ query_data_with_index_parser = subparsers.add_parser(
+ "query_data_with_index", help=query_data_with_index.__doc__
+ )
+ query_data_with_index_parser.add_argument("--start_title", default="Aardvark")
+ query_data_with_index_parser.add_argument("--end_title", default="Goo")
+ subparsers.add_parser("read_data_with_index", help=insert_data.__doc__)
+ subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__)
+ subparsers.add_parser("read_data_with_storing_index", help=insert_data.__doc__)
+ subparsers.add_parser(
+ "create_table_with_timestamp", help=create_table_with_timestamp.__doc__
+ )
+ subparsers.add_parser(
+ "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__
+ )
+ subparsers.add_parser("add_timestamp_column", help=add_timestamp_column.__doc__)
+ subparsers.add_parser(
+ "update_data_with_timestamp", help=update_data_with_timestamp.__doc__
+ )
+ subparsers.add_parser(
+ "query_data_with_timestamp", help=query_data_with_timestamp.__doc__
+ )
+ subparsers.add_parser("write_struct_data", help=write_struct_data.__doc__)
+ subparsers.add_parser("query_with_struct", help=query_with_struct.__doc__)
+ subparsers.add_parser(
+ "query_with_array_of_struct", help=query_with_array_of_struct.__doc__
+ )
+ subparsers.add_parser("query_struct_field", help=query_struct_field.__doc__)
+ subparsers.add_parser(
+ "query_nested_struct_field", help=query_nested_struct_field.__doc__
+ )
+ subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__)
+ subparsers.add_parser("update_data_with_dml", help=update_data_with_dml.__doc__)
+ subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__)
+ subparsers.add_parser(
+ "update_data_with_dml_timestamp", help=update_data_with_dml_timestamp.__doc__
+ )
+ subparsers.add_parser(
+ "dml_write_read_transaction", help=dml_write_read_transaction.__doc__
+ )
+ subparsers.add_parser(
+ "update_data_with_dml_struct", help=update_data_with_dml_struct.__doc__
+ )
+ subparsers.add_parser("insert_with_dml", help=insert_with_dml.__doc__)
+ subparsers.add_parser(
+ "query_data_with_parameter", help=query_data_with_parameter.__doc__
+ )
+ subparsers.add_parser(
+ "write_with_dml_transaction", help=write_with_dml_transaction.__doc__
+ )
+ subparsers.add_parser(
+ "update_data_with_partitioned_dml",
+ help=update_data_with_partitioned_dml.__doc__,
+ )
+ subparsers.add_parser(
+ "delete_data_with_partitioned_dml",
+ help=delete_data_with_partitioned_dml.__doc__,
+ )
+ subparsers.add_parser("update_with_batch_dml", help=update_with_batch_dml.__doc__)
+ subparsers.add_parser(
+ "create_table_with_datatypes", help=create_table_with_datatypes.__doc__
+ )
+ subparsers.add_parser("insert_datatypes_data", help=insert_datatypes_data.__doc__)
+ subparsers.add_parser("query_data_with_array", help=query_data_with_array.__doc__)
+ subparsers.add_parser("query_data_with_bool", help=query_data_with_bool.__doc__)
+ subparsers.add_parser("query_data_with_bytes", help=query_data_with_bytes.__doc__)
+ subparsers.add_parser("query_data_with_date", help=query_data_with_date.__doc__)
+ subparsers.add_parser("query_data_with_float", help=query_data_with_float.__doc__)
+ subparsers.add_parser("query_data_with_int", help=query_data_with_int.__doc__)
+ subparsers.add_parser("query_data_with_string", help=query_data_with_string.__doc__)
+ subparsers.add_parser(
+ "query_data_with_timestamp_parameter",
+ help=query_data_with_timestamp_parameter.__doc__,
+ )
+ subparsers.add_parser(
+ "query_data_with_query_options", help=query_data_with_query_options.__doc__
+ )
+ subparsers.add_parser(
+ "create_client_with_query_options",
+ help=create_client_with_query_options.__doc__,
+ )
+
+ args = parser.parse_args()
+
+ if args.command == "create_instance":
+ create_instance(args.instance_id)
+ elif args.command == "create_database":
+ create_database(args.instance_id, args.database_id)
+ elif args.command == "insert_data":
+ insert_data(args.instance_id, args.database_id)
+ elif args.command == "delete_data":
+ delete_data(args.instance_id, args.database_id)
+ elif args.command == "query_data":
+ query_data(args.instance_id, args.database_id)
+ elif args.command == "read_data":
+ read_data(args.instance_id, args.database_id)
+ elif args.command == "read_stale_data":
+ read_stale_data(args.instance_id, args.database_id)
+ elif args.command == "add_column":
+ add_column(args.instance_id, args.database_id)
+ elif args.command == "update_data":
+ update_data(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_new_column":
+ query_data_with_new_column(args.instance_id, args.database_id)
+ elif args.command == "read_write_transaction":
+ read_write_transaction(args.instance_id, args.database_id)
+ elif args.command == "read_only_transaction":
+ read_only_transaction(args.instance_id, args.database_id)
+ elif args.command == "add_index":
+ add_index(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_index":
+ query_data_with_index(
+ args.instance_id, args.database_id, args.start_title, args.end_title
+ )
+ elif args.command == "read_data_with_index":
+ read_data_with_index(args.instance_id, args.database_id)
+ elif args.command == "add_storing_index":
+ add_storing_index(args.instance_id, args.database_id)
+ elif args.command == "read_data_with_storing_index":
+ read_data_with_storing_index(args.instance_id, args.database_id)
+ elif args.command == "create_table_with_timestamp":
+ create_table_with_timestamp(args.instance_id, args.database_id)
+ elif args.command == "insert_data_with_timestamp":
+ insert_data_with_timestamp(args.instance_id, args.database_id)
+ elif args.command == "add_timestamp_column":
+ add_timestamp_column(args.instance_id, args.database_id)
+ elif args.command == "update_data_with_timestamp":
+ update_data_with_timestamp(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_timestamp":
+ query_data_with_timestamp(args.instance_id, args.database_id)
+ elif args.command == "write_struct_data":
+ write_struct_data(args.instance_id, args.database_id)
+ elif args.command == "query_with_struct":
+ query_with_struct(args.instance_id, args.database_id)
+ elif args.command == "query_with_array_of_struct":
+ query_with_array_of_struct(args.instance_id, args.database_id)
+ elif args.command == "query_struct_field":
+ query_struct_field(args.instance_id, args.database_id)
+ elif args.command == "query_nested_struct_field":
+ query_nested_struct_field(args.instance_id, args.database_id)
+ elif args.command == "insert_data_with_dml":
+ insert_data_with_dml(args.instance_id, args.database_id)
+ elif args.command == "update_data_with_dml":
+ update_data_with_dml(args.instance_id, args.database_id)
+ elif args.command == "delete_data_with_dml":
+ delete_data_with_dml(args.instance_id, args.database_id)
+ elif args.command == "update_data_with_dml_timestamp":
+ update_data_with_dml_timestamp(args.instance_id, args.database_id)
+ elif args.command == "dml_write_read_transaction":
+ dml_write_read_transaction(args.instance_id, args.database_id)
+ elif args.command == "update_data_with_dml_struct":
+ update_data_with_dml_struct(args.instance_id, args.database_id)
+ elif args.command == "insert_with_dml":
+ insert_with_dml(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_parameter":
+ query_data_with_parameter(args.instance_id, args.database_id)
+ elif args.command == "write_with_dml_transaction":
+ write_with_dml_transaction(args.instance_id, args.database_id)
+ elif args.command == "update_data_with_partitioned_dml":
+ update_data_with_partitioned_dml(args.instance_id, args.database_id)
+ elif args.command == "delete_data_with_partitioned_dml":
+ delete_data_with_partitioned_dml(args.instance_id, args.database_id)
+ elif args.command == "update_with_batch_dml":
+ update_with_batch_dml(args.instance_id, args.database_id)
+ elif args.command == "create_table_with_datatypes":
+ create_table_with_datatypes(args.instance_id, args.database_id)
+ elif args.command == "insert_datatypes_data":
+ insert_datatypes_data(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_array":
+ query_data_with_array(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_bool":
+ query_data_with_bool(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_bytes":
+ query_data_with_bytes(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_date":
+ query_data_with_date(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_float":
+ query_data_with_float(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_int":
+ query_data_with_int(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_string":
+ query_data_with_string(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_timestamp_parameter":
+ query_data_with_timestamp_parameter(args.instance_id, args.database_id)
+ elif args.command == "query_data_with_query_options":
+ query_data_with_query_options(args.instance_id, args.database_id)
+ elif args.command == "create_client_with_query_options":
+ create_client_with_query_options(args.instance_id, args.database_id)
diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py
new file mode 100644
index 0000000000..a62a3d90aa
--- /dev/null
+++ b/samples/samples/snippets_test.py
@@ -0,0 +1,388 @@
+# Copyright 2016 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+import uuid
+
+from google.cloud import spanner
+import pytest
+
+import snippets
+
+
+def unique_instance_id():
+ """ Creates a unique id for the database. """
+ return f"test-instance-{uuid.uuid4().hex[:10]}"
+
+
+def unique_database_id():
+ """ Creates a unique id for the database. """
+ return f"test-db-{uuid.uuid4().hex[:10]}"
+
+
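+# Generated once at import time, so every test in this module shares one
+# instance and one database.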
+INSTANCE_ID = unique_instance_id()
+DATABASE_ID = unique_database_id()
+
+
+@pytest.fixture(scope="module")
+def spanner_instance():
+ snippets.create_instance(INSTANCE_ID)
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(INSTANCE_ID)
+ yield instance
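+    # Teardown: everything after the yield runs once the module's tests finish.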
+ instance.delete()
+
+
+@pytest.fixture(scope="module")
+def database(spanner_instance):
+ """ Creates a temporary database that is removed after testing. """
+ snippets.create_database(INSTANCE_ID, DATABASE_ID)
+ db = spanner_instance.database(DATABASE_ID)
+ yield db
+ db.drop()
+
+
+def test_create_instance(spanner_instance):
+ # Reload will only succeed if the instance exists.
+ spanner_instance.reload()
+
+
+def test_create_database(database):
+ # Reload will only succeed if the database exists.
+ database.reload()
+
+
+def test_insert_data(capsys):
+ snippets.insert_data(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Inserted data" in out
+
+
+def test_delete_data(capsys):
+ snippets.delete_data(INSTANCE_ID, DATABASE_ID)
+ snippets.insert_data(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Deleted data" in out
+
+
+def test_query_data(capsys):
+ snippets.query_data(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk" in out
+
+
+def test_add_column(capsys):
+ snippets.add_column(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Added the MarketingBudget column." in out
+
+
+def test_read_data(capsys):
+ snippets.read_data(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk" in out
+
+
+def test_update_data(capsys):
+ # Sleep for 15 seconds to ensure previous inserts will be
+ # 'stale' by the time test_read_stale_data is run.
+ time.sleep(15)
+
+ snippets.update_data(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Updated data." in out
+
+
+def test_read_stale_data(capsys):
+    # This snippet relies on test_update_data having run at least 15 seconds
+    # after the previous insert, so the stale read still sees the old data.
+ snippets.read_stale_data(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 1, AlbumId: 1, MarketingBudget: None" in out
+
+
+def test_read_write_transaction(capsys):
+ snippets.read_write_transaction(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Transaction complete" in out
+
+
+def test_query_data_with_new_column(capsys):
+ snippets.query_data_with_new_column(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 1, AlbumId: 1, MarketingBudget: 300000" in out
+ assert "SingerId: 2, AlbumId: 2, MarketingBudget: 300000" in out
+
+
+def test_add_index(capsys):
+ snippets.add_index(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Added the AlbumsByAlbumTitle index" in out
+
+
+def test_query_data_with_index(capsys):
+ snippets.query_data_with_index(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Go, Go, Go" in out
+ assert "Forever Hold Your Peace" in out
+ assert "Green" not in out
+
+
+def test_read_data_with_index(capsys):
+ snippets.read_data_with_index(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Go, Go, Go" in out
+ assert "Forever Hold Your Peace" in out
+ assert "Green" in out
+
+
+def test_add_storing_index(capsys):
+ snippets.add_storing_index(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Added the AlbumsByAlbumTitle2 index." in out
+
+
+def test_read_data_with_storing_index(capsys):
+ snippets.read_data_with_storing_index(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "300000" in out
+
+
+def test_read_only_transaction(capsys):
+ snippets.read_only_transaction(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ # Snippet does two reads, so entry should be listed twice
+ assert out.count("SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk") == 2
+
+
+def test_add_timestamp_column(capsys):
+ snippets.add_timestamp_column(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert 'Altered table "Albums" on database ' in out
+
+
+def test_update_data_with_timestamp(capsys):
+ snippets.update_data_with_timestamp(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Updated data" in out
+
+
+def test_query_data_with_timestamp(capsys):
+ snippets.query_data_with_timestamp(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 1, AlbumId: 1, MarketingBudget: 1000000" in out
+ assert "SingerId: 2, AlbumId: 2, MarketingBudget: 750000" in out
+
+
+def test_create_table_with_timestamp(capsys):
+ snippets.create_table_with_timestamp(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Created Performances table on database" in out
+
+
+def test_insert_data_with_timestamp(capsys):
+ snippets.insert_data_with_timestamp(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Inserted data." in out
+
+
+def test_write_struct_data(capsys):
+ snippets.write_struct_data(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Inserted sample data for STRUCT queries" in out
+
+
+def test_query_with_struct(capsys):
+ snippets.query_with_struct(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 6" in out
+
+
+def test_query_with_array_of_struct(capsys):
+ snippets.query_with_array_of_struct(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 8" in out
+ assert "SingerId: 7" in out
+ assert "SingerId: 6" in out
+
+
+def test_query_struct_field(capsys):
+ snippets.query_struct_field(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 6" in out
+
+
+def test_query_nested_struct_field(capsys):
+ snippets.query_nested_struct_field(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 6 SongName: Imagination" in out
+ assert "SingerId: 9 SongName: Imagination" in out
+
+
+def test_insert_data_with_dml(capsys):
+ snippets.insert_data_with_dml(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "1 record(s) inserted." in out
+
+
+def test_update_data_with_dml(capsys):
+ snippets.update_data_with_dml(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "1 record(s) updated." in out
+
+
+def test_delete_data_with_dml(capsys):
+ snippets.delete_data_with_dml(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "1 record(s) deleted." in out
+
+
+def test_update_data_with_dml_timestamp(capsys):
+ snippets.update_data_with_dml_timestamp(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "2 record(s) updated." in out
+
+
+def test_dml_write_read_transaction(capsys):
+ snippets.dml_write_read_transaction(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "1 record(s) inserted." in out
+ assert "FirstName: Timothy, LastName: Campbell" in out
+
+
+def test_update_data_with_dml_struct(capsys):
+ snippets.update_data_with_dml_struct(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "1 record(s) updated" in out
+
+
+def test_insert_with_dml(capsys):
+ snippets.insert_with_dml(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "4 record(s) inserted" in out
+
+
+def test_query_data_with_parameter(capsys):
+ snippets.query_data_with_parameter(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "SingerId: 12, FirstName: Melissa, LastName: Garcia" in out
+
+
+def test_write_with_dml_transaction(capsys):
+ snippets.write_with_dml_transaction(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Transferred 200000 from Album2's budget to Album1's" in out
+
+
+def test_update_data_with_partitioned_dml(capsys):
+ snippets.update_data_with_partitioned_dml(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "3 record(s) updated" in out
+
+
+def test_delete_data_with_partitioned_dml(capsys):
+ snippets.delete_data_with_partitioned_dml(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "5 record(s) deleted" in out
+
+
+def test_update_with_batch_dml(capsys):
+ snippets.update_with_batch_dml(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Executed 2 SQL statements using Batch DML" in out
+
+
+def test_create_table_with_datatypes(capsys):
+ snippets.create_table_with_datatypes(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Created Venues table on database" in out
+
+
+def test_insert_datatypes_data(capsys):
+ snippets.insert_datatypes_data(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "Inserted data." in out
+
+
+def test_query_data_with_array(capsys):
+ snippets.query_data_with_array(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01" in out
+ assert "VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01" in out
+
+
+def test_query_data_with_bool(capsys):
+ snippets.query_data_with_bool(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 19, VenueName: Venue 19, OutdoorVenue: True" in out
+
+
+def test_query_data_with_bytes(capsys):
+ snippets.query_data_with_bytes(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 4, VenueName: Venue 4" in out
+
+
+def test_query_data_with_date(capsys):
+ snippets.query_data_with_date(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02" in out
+ assert "VenueId: 42, VenueName: Venue 42, LastContactDate: 2018-10-01" in out
+
+
+def test_query_data_with_float(capsys):
+ snippets.query_data_with_float(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8" in out
+ assert "VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9" in out
+
+
+def test_query_data_with_int(capsys):
+ snippets.query_data_with_int(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 19, VenueName: Venue 19, Capacity: 6300" in out
+ assert "VenueId: 42, VenueName: Venue 42, Capacity: 3000" in out
+
+
+def test_query_data_with_string(capsys):
+ snippets.query_data_with_string(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 42, VenueName: Venue 42" in out
+
+
+def test_query_data_with_timestamp_parameter(capsys):
+ # Wait 5 seconds to avoid a time drift issue for the next query:
+ # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4197.
+ time.sleep(5)
+ snippets.query_data_with_timestamp_parameter(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out
+ assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out
+ assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out
+
+
+def test_query_data_with_query_options(capsys):
+ snippets.query_data_with_query_options(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out
+ assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out
+ assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out
+
+
+def test_create_client_with_query_options(capsys):
+ snippets.create_client_with_query_options(INSTANCE_ID, DATABASE_ID)
+ out, _ = capsys.readouterr()
+ assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out
+ assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out
+ assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out
diff --git a/synth.metadata b/synth.metadata
index 3618f8cff9..df4ded371b 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,52 +3,22 @@
{
"git": {
"name": ".",
- "remote": "https://github.com/googleapis/python-spanner.git",
- "sha": "edfefc8aa2e74e0366b0f9208896c5637f1a0b11"
+ "remote": "git@github.com:larkee/python-spanner.git",
+ "sha": "3e54af6f8582e9620afb704e1d08994eab12c365"
}
},
{
"git": {
- "name": "googleapis",
- "remote": "https://github.com/googleapis/googleapis.git",
- "sha": "3474dc892349674efda09d74b3a574765d996188",
- "internalRef": "321098618"
+ "name": "synthtool",
+ "remote": "https://github.com/googleapis/synthtool.git",
+ "sha": "2686d7075fa456972bf4d08680d99617f5eb32b1"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "4f2c9f752a94042472fc03c5bd9e06e89817d2bd"
- }
- }
- ],
- "destinations": [
- {
- "client": {
- "source": "googleapis",
- "apiName": "spanner",
- "apiVersion": "v1",
- "language": "python",
- "generator": "bazel"
- }
- },
- {
- "client": {
- "source": "googleapis",
- "apiName": "spanner_admin_instance",
- "apiVersion": "v1",
- "language": "python",
- "generator": "bazel"
- }
- },
- {
- "client": {
- "source": "googleapis",
- "apiName": "spanner_admin_database",
- "apiVersion": "v1",
- "language": "python",
- "generator": "bazel"
+ "sha": "2686d7075fa456972bf4d08680d99617f5eb32b1"
}
}
]
diff --git a/synth.py b/synth.py
index 7f9540f72b..bf0c2f1b63 100644
--- a/synth.py
+++ b/synth.py
@@ -15,6 +15,7 @@
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
from synthtool import gcp
+from synthtool.languages import python
gapic = gcp.GAPICBazel()
common = gcp.CommonTemplates()
@@ -151,7 +152,7 @@
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(unit_cov_level=97, cov_level=99)
+templated_files = common.py_library(unit_cov_level=97, cov_level=99, samples=True)
s.move(templated_files, excludes=["noxfile.py"])
# Template's MANIFEST.in does not include the needed GAPIC config file.
@@ -171,4 +172,10 @@
"\n\g<0>",
)
+# ----------------------------------------------------------------------------
+# Samples templates
+# ----------------------------------------------------------------------------
+
+python.py_samples()
+
s.shell.run(["nox", "-s", "blacken"], hide_output=False)