Skip to content

Commit

Permalink
fix header and add more tests
Browse files Browse the repository at this point in the history
  • Loading branch information
rahul2393 committed Jan 9, 2024
1 parent 59918ef commit 2a9182e
Show file tree
Hide file tree
Showing 6 changed files with 438 additions and 37 deletions.
2 changes: 1 addition & 1 deletion samples/generated/conftest.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright 2021 Google LLC All rights reserved.
# Copyright 2024 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand Down
17 changes: 9 additions & 8 deletions samples/generated/noxfile.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright 2019 Google LLC
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -160,6 +160,7 @@ def blacken(session: nox.sessions.Session) -> None:
# format = isort + black
#


@nox.session
def format(session: nox.sessions.Session) -> None:
"""
Expand Down Expand Up @@ -187,7 +188,9 @@ def _session_tests(
session: nox.sessions.Session, post_install: Callable = None
) -> None:
# check for presence of tests
test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True)
test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob(
"**/test_*.py", recursive=True
)
test_list.extend(glob.glob("**/tests", recursive=True))

if len(test_list) == 0:
Expand All @@ -209,9 +212,7 @@ def _session_tests(

if os.path.exists("requirements-test.txt"):
if os.path.exists("constraints-test.txt"):
session.install(
"-r", "requirements-test.txt", "-c", "constraints-test.txt"
)
session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
else:
session.install("-r", "requirements-test.txt")
with open("requirements-test.txt") as rtfile:
Expand All @@ -224,9 +225,9 @@ def _session_tests(
post_install(session)

if "pytest-parallel" in packages:
concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto'])
concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"])
elif "pytest-xdist" in packages:
concurrent_args.extend(['-n', 'auto'])
concurrent_args.extend(["-n", "auto"])

session.run(
"pytest",
Expand Down Expand Up @@ -256,7 +257,7 @@ def py(session: nox.sessions.Session) -> None:


def _get_repo_root() -> Optional[str]:
""" Returns the root folder of the project. """
"""Returns the root folder of the project."""
# Get root of this repository. Assume we don't have directories nested deeper than 10 items.
p = Path(os.getcwd())
for i in range(10):
Expand Down
132 changes: 132 additions & 0 deletions samples/generated/pg_snippet_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
# Copyright 2024 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import uuid

from google.api_core import exceptions
from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect
import pytest
from test_utils.retry import RetryErrors

import pg_snippets as snippets

# DDL for the Singers table (PostgreSQL dialect). FullName is a stored
# generated column concatenating FirstName and LastName.
CREATE_TABLE_SINGERS = """\
CREATE TABLE Singers (
SingerId BIGINT NOT NULL,
FirstName CHARACTER VARYING(1024),
LastName CHARACTER VARYING(1024),
SingerInfo BYTEA,
FullName CHARACTER VARYING(2048)
GENERATED ALWAYS AS (FirstName || ' ' || LastName) STORED,
PRIMARY KEY (SingerId)
)
"""

# DDL for the Albums table, interleaved in Singers so album rows are
# co-located with (and cascade-deleted with) their parent singer row.
CREATE_TABLE_ALBUMS = """\
CREATE TABLE Albums (
SingerId BIGINT NOT NULL,
AlbumId BIGINT NOT NULL,
AlbumTitle CHARACTER VARYING(1024),
PRIMARY KEY (SingerId, AlbumId)
) INTERLEAVE IN PARENT Singers ON DELETE CASCADE
"""

# Retry helper: re-runs the wrapped operation when Cloud Spanner returns
# RESOURCE_EXHAUSTED (HTTP 429), waiting 15 seconds between attempts.
retry_429 = RetryErrors(exceptions.ResourceExhausted, delay=15)


@pytest.fixture(scope="module")
def sample_name():
    """Name of this sample, consumed by the shared test fixtures."""
    name = "pg_snippets"
    return name


@pytest.fixture(scope="module")
def database_dialect():
    """Spanner dialect used to initialize the test database.

    Can be either GoogleStandardSql or PostgreSql; these samples
    target the PostgreSql dialect.
    """
    dialect = DatabaseDialect.POSTGRESQL
    return dialect


@pytest.fixture(scope="module")
def create_instance_id():
    """Unique id for the instance used by the create-instance sample.

    NOTE(review): the previous docstring ("Id for the low-cost instance")
    was copy-pasted from lci_instance_id; this fixture names the
    create-instance sample's instance.
    """
    return f"create-instance-{uuid.uuid4().hex[:10]}"


@pytest.fixture(scope="module")
def lci_instance_id():
    """Id for the low-cost instance."""
    suffix = uuid.uuid4().hex[:10]
    return "lci-instance-" + suffix


@pytest.fixture(scope="module")
def database_id():
    """Unique id for the database used by most sample tests."""
    suffix = uuid.uuid4().hex[:10]
    return "test-db-" + suffix


@pytest.fixture(scope="module")
def create_database_id():
    """Unique id for the database used by the create-database sample."""
    suffix = uuid.uuid4().hex[:10]
    return "create-db-" + suffix


@pytest.fixture(scope="module")
def cmek_database_id():
    """Unique id for the CMEK-enabled database sample."""
    suffix = uuid.uuid4().hex[:10]
    return "cmek-db-" + suffix


@pytest.fixture(scope="module")
def default_leader_database_id():
    """Unique id for the default-leader database sample."""
    # NOTE(review): underscores (not hyphens like the other ids) —
    # preserved as-is from the original.
    suffix = uuid.uuid4().hex[:10]
    return "leader_db_" + suffix


@pytest.fixture(scope="module")
def database_ddl():
    """DDL statements used to set up the database.

    Sample testcase modules can override as needed.
    """
    statements = [CREATE_TABLE_SINGERS, CREATE_TABLE_ALBUMS]
    return statements


@pytest.fixture(scope="module")
def default_leader():
    """Default leader region for multi-region instances."""
    leader = "us-east4"
    return leader


@pytest.mark.dependency(name="create_table_with_datatypes")
def test_create_table_with_datatypes(capsys, instance_id, sample_database):
    """Creating the Venues table prints a confirmation message."""
    snippets.create_table_with_datatypes(instance_id, sample_database.database_id)
    captured, _ = capsys.readouterr()
    assert "Created Venues table on database" in captured


@pytest.mark.dependency(
    name="insert_datatypes_data",
    depends=["create_table_with_datatypes"],
)
def test_insert_datatypes_data(capsys, instance_id, sample_database):
    """Inserting sample rows prints a confirmation message."""
    snippets.insert_datatypes_data(instance_id, sample_database.database_id)
    captured, _ = capsys.readouterr()
    assert "Inserted data." in captured


@pytest.mark.dependency(name="add_jsonb_column", depends=["insert_datatypes_data"])
def test_add_jsonb_column(capsys, instance_id, sample_database):
    """Adding the JSONB column prints progress and confirmation messages."""
    snippets.add_jsonb_column(instance_id, sample_database.database_id)
    captured, _ = capsys.readouterr()
    assert "Waiting for operation to complete..." in captured
    assert 'Altered table "Venues" on database ' in captured
171 changes: 171 additions & 0 deletions samples/generated/pg_snippets.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,171 @@
#!/usr/bin/env python

# Copyright 2024 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This application demonstrates how to do basic operations using Cloud
Spanner PostgreSql dialect.
For more information, see the README.rst under /spanner.
"""
import base64
import decimal

from google.cloud import spanner, spanner_admin_database_v1

# Maximum time (in seconds) to wait for a database-admin DDL operation
# to finish before giving up.
OPERATION_TIMEOUT_SECONDS = 240


def create_table_with_datatypes(instance_id, database_id):
    """Creates a table with supported datatypes."""
    # [START spanner_postgresql_create_table_with_datatypes]
    # instance_id = "your-spanner-instance"
    # database_id = "your-spanner-db-id"
    spanner_client = spanner.Client()
    database = spanner_client.instance(instance_id).database(database_id)

    # DDL exercising the PostgreSQL-dialect column types supported by Spanner.
    create_venues_ddl = """CREATE TABLE Venues (
VenueId BIGINT NOT NULL,
VenueName character varying(100),
VenueInfo BYTEA,
Capacity BIGINT,
OutdoorVenue BOOL,
PopularityScore FLOAT8,
Revenue NUMERIC,
LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL,
PRIMARY KEY (VenueId))"""

    request = spanner_admin_database_v1.UpdateDatabaseDdlRequest(
        database=database.name,
        statements=[create_venues_ddl],
    )
    operation = spanner_client.database_admin_api.update_database_ddl(request)

    print("Waiting for operation to complete...")
    # Block until the schema change finishes (or times out).
    operation.result(OPERATION_TIMEOUT_SECONDS)

    print(
        "Created Venues table on database {} on instance {}".format(
            database_id, instance_id
        )
    )
    # [END spanner_postgresql_create_table_with_datatypes]


# [START spanner_postgresql_jsonb_add_column]
def add_jsonb_column(instance_id, database_id):
    """
    Alters Venues tables in the database adding a JSONB column.
    You can create the table by running the `create_table_with_datatypes`
    sample or by running this DDL statement against your database:
    CREATE TABLE Venues (
    VenueId BIGINT NOT NULL,
    VenueName character varying(100),
    VenueInfo BYTEA,
    Capacity BIGINT,
    OutdoorVenue BOOL,
    PopularityScore FLOAT8,
    Revenue NUMERIC,
    LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL,
    PRIMARY KEY (VenueId))
    """
    # instance_id = "your-spanner-instance"
    # database_id = "your-spanner-db-id"

    spanner_client = spanner.Client()
    database = spanner_client.instance(instance_id).database(database_id)

    ddl_request = spanner_admin_database_v1.UpdateDatabaseDdlRequest(
        database=database.name,
        statements=["ALTER TABLE Venues ADD COLUMN VenueDetails JSONB"],
    )

    operation = spanner_client.database_admin_api.update_database_ddl(ddl_request)

    print("Waiting for operation to complete...")
    # Block until the schema change finishes (or times out).
    operation.result(OPERATION_TIMEOUT_SECONDS)

    print(
        'Altered table "Venues" on database {} on instance {}.'.format(
            database_id, instance_id
        )
    )


# [END spanner_postgresql_jsonb_add_column]


def insert_datatypes_data(instance_id, database_id):
    """Inserts data with supported datatypes into a table.

    Expects the Venues table created by `create_table_with_datatypes`.

    Args:
        instance_id: id of the Cloud Spanner instance to use.
        database_id: id of the database containing the Venues table.
    """
    # [START spanner_postgresql_insert_datatypes_data]
    # instance_id = "your-spanner-instance"
    # database_id = "your-spanner-db-id"
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)

    # BYTEA column payloads are base64-encoded before insertion.
    # (Renamed from camelCase exampleBytesN to PEP 8 snake_case.)
    example_bytes1 = base64.b64encode("Hello World 1".encode())
    example_bytes2 = base64.b64encode("Hello World 2".encode())
    example_bytes3 = base64.b64encode("Hello World 3".encode())
    # The batch context manager commits all mutations atomically on exit.
    with database.batch() as batch:
        batch.insert(
            table="Venues",
            columns=(
                "VenueId",
                "VenueName",
                "VenueInfo",
                "Capacity",
                "OutdoorVenue",
                "PopularityScore",
                "Revenue",
                "LastUpdateTime",
            ),
            values=[
                (
                    4,
                    "Venue 4",
                    example_bytes1,
                    1800,
                    False,
                    0.85543,
                    decimal.Decimal("215100.10"),
                    spanner.COMMIT_TIMESTAMP,
                ),
                (
                    19,
                    "Venue 19",
                    example_bytes2,
                    6300,
                    True,
                    0.98716,
                    decimal.Decimal("1200100.00"),
                    spanner.COMMIT_TIMESTAMP,
                ),
                (
                    42,
                    "Venue 42",
                    example_bytes3,
                    3000,
                    False,
                    0.72598,
                    decimal.Decimal("390650.99"),
                    spanner.COMMIT_TIMESTAMP,
                ),
            ],
        )

    print("Inserted data.")
    # [END spanner_postgresql_insert_datatypes_data]
Loading

0 comments on commit 2a9182e

Please sign in to comment.