diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
new file mode 100644
index 00000000..a6e7ac0e
--- /dev/null
+++ b/.github/.OwlBot.lock.yaml
@@ -0,0 +1,4 @@
+docker:
+  digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa
+  image: gcr.io/repo-automation-bots/owlbot-python:latest
+
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
new file mode 100644
index 00000000..478f625a
--- /dev/null
+++ b/.github/.OwlBot.yaml
@@ -0,0 +1,30 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+docker:
+  image: gcr.io/repo-automation-bots/owlbot-python:latest
+
+deep-remove-regex:
+  - /owl-bot-staging
+
+deep-preserve-regex:
+  - /owl-bot-staging/v1alpha2
+  - /owl-bot-staging/v1beta1
+
+deep-copy-regex:
+  - source: /google/cloud/bigquery/storage/(v.*)/.*-py/(.*)
+    dest: /owl-bot-staging/$1/$2
+
+begin-after-commit-hash: 79c15da3a71c276e23aa2746f9fa243741763179
+
diff --git a/owlbot.py b/owlbot.py
new file mode 100644
index 00000000..5671127f
--- /dev/null
+++ b/owlbot.py
@@ -0,0 +1,146 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script is used to synthesize generated parts of this library."""
+
+import re
+
+import synthtool as s
+from synthtool import gcp
+from synthtool.languages import python
+
+common = gcp.CommonTemplates()
+
+default_version = "v1"
+
+for library in s.get_staging_dirs(default_version):
+    # We don't want the generated client to be accessible through
+    # "google.cloud.bigquery_storage"; replace it with the hand-written client
+    # that wraps it.
+    s.replace(
+        library / "google/cloud/bigquery_storage/__init__.py",
+        rf"from google\.cloud\.bigquery_storage_{library.name}\.services.big_query_read.client import",
+        f"from google.cloud.bigquery_storage_{library.name} import",
+    )
+
+    # We also don't want to expose the async client just yet, at least not until
+    # it is wrapped in its own manual client class.
+    s.replace(
+        library / "google/cloud/bigquery_storage/__init__.py",
+        (
+            rf"from google\.cloud\.bigquery_storage_{library.name}\.services.big_query_read.async_client "
+            r"import BigQueryReadAsyncClient\n"
+        ),
+        "",
+    )
+    s.replace(
+        library / "google/cloud/bigquery_storage/__init__.py",
+        r"""["']BigQueryReadAsyncClient["'],\n""",
+        "",
+    )
+
+    # We want types and __version__ to be accessible through the "main" library
+    # entry point.
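+    # Illustrative only: assuming the generated file imports ArrowRecordBatch
+    # from types.arrow (which the pattern below expects), the v1 import block
+    # comes out as:
+    #
+    #     from google.cloud.bigquery_storage_v1 import types
+    #     from google.cloud.bigquery_storage_v1 import __version__
+    #     from google.cloud.bigquery_storage_v1.types.arrow import ArrowRecordBatch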
+    s.replace(
+        library / "google/cloud/bigquery_storage/__init__.py",
+        rf"from google\.cloud\.bigquery_storage_{library.name}\.types\.arrow import ArrowRecordBatch",
+        (
+            f"from google.cloud.bigquery_storage_{library.name} import types\n"
+            f"from google.cloud.bigquery_storage_{library.name} import __version__\n"
+            "\\g<0>"
+        ),
+    )
+    s.replace(
+        library / "google/cloud/bigquery_storage/__init__.py",
+        r"""["']ArrowRecordBatch["']""",
+        ('"__version__",\n' '    "types",\n' "    \\g<0>"),
+    )
+
+    # We want to expose all types through "google.cloud.bigquery_storage.types",
+    # not just the types generated for the BQ Storage library. For example, we also
+    # want to include common proto types such as Timestamp.
+    s.replace(
+        library / "google/cloud/bigquery_storage/__init__.py",
+        r"import types",
+        "import gapic_types as types",
+    )
+
+    # The DataFormat enum is not exposed in bigquery_storage_v1/types, so add it there.
+    s.replace(
+        library / f"google/cloud/bigquery_storage_{library.name}*/types/__init__.py",
+        r"from \.stream import \(",
+        "\\g<0>\n    DataFormat,",
+    )
+    s.replace(
+        library / f"google/cloud/bigquery_storage_{library.name}*/types/__init__.py",
+        r"""["']ReadSession["']""",
+        '"DataFormat",\n    \\g<0>',
+    )
+
+    # The append_rows method doesn't contain keyword arguments that build request
+    # objects, so flattened tests are not needed and break with TypeError.
+    s.replace(
+        library / f"tests/unit/gapic/bigquery_storage_{library.name}*/test_big_query_write.py",
+        r"(@[a-z.()\n]*\n)?(async )?"
+        r"def test_append_rows_flattened[_a-z]*\(\):\n"
+        r"( {4}.*|\n)+",
+        "\n",
+    )
+
+    s.move(
+        library,
+        excludes=[
+            "bigquery-storage-*-py.tar.gz",
+            "docs/conf.py",
+            "docs/index.rst",
+            f"google/cloud/bigquery_storage_{library.name}/__init__.py",
+            # v1beta2 was first generated after the microgenerator migration.
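+            # (Assumed rationale: fixup scripts exist to migrate callers off a
+            # pre-microgenerator surface, which v1beta2 never had, so the
+            # generated script is not copied.)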
+ "scripts/fixup_bigquery_storage_v1beta2_keywords.py", + "README.rst", + "nox*.py", + "setup.py", + "setup.cfg", + ], + ) + +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +extras = ["fastavro", "pandas", "pyarrow"] + +templated_files = common.py_library( + microgenerator=True, + samples=True, + unit_test_extras=extras, + system_test_extras=extras, + system_test_external_dependencies=["google-cloud-bigquery"], + cov_level=95, +) +s.move( + templated_files, excludes=[".coveragerc"] +) # microgenerator has a good .coveragerc file + + +# ---------------------------------------------------------------------------- +# Samples templates +# ---------------------------------------------------------------------------- + +python.py_samples(skip_readmes=True) + +# TODO(busunkim): Use latest sphinx after microgenerator transition +s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"') + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) \ No newline at end of file diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index 5e624baf..00000000 --- a/synth.metadata +++ /dev/null @@ -1,184 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-bigquery-storage.git", - "sha": "0fe648449715c0591c64a2013330ecba9d125fa1" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "23efea9fc7bedfe53b24295ed84b5f873606edcb", - "internalRef": "374220891" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "bigquery_storage", - "apiVersion": "v1beta2", - "language": "python", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "bigquery_storage", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic-head.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic-head.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - 
".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic-head.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples-against-head.sh", - ".kokoro/test-samples-impl.sh", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/bigquery_storage_v1/big_query_read.rst", - "docs/bigquery_storage_v1/services.rst", - "docs/bigquery_storage_v1/types.rst", - "docs/bigquery_storage_v1beta2/big_query_read.rst", - "docs/bigquery_storage_v1beta2/big_query_write.rst", - "docs/bigquery_storage_v1beta2/services.rst", - "docs/bigquery_storage_v1beta2/types.rst", - "docs/conf.py", - "docs/multiprocessing.rst", - "google/cloud/bigquery_storage/__init__.py", - "google/cloud/bigquery_storage/py.typed", - "google/cloud/bigquery_storage_v1/gapic_metadata.json", - "google/cloud/bigquery_storage_v1/proto/arrow.proto", - "google/cloud/bigquery_storage_v1/proto/avro.proto", - "google/cloud/bigquery_storage_v1/proto/storage.proto", - "google/cloud/bigquery_storage_v1/proto/stream.proto", - "google/cloud/bigquery_storage_v1/py.typed", - "google/cloud/bigquery_storage_v1/services/__init__.py", - "google/cloud/bigquery_storage_v1/services/big_query_read/__init__.py", - "google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py", - "google/cloud/bigquery_storage_v1/services/big_query_read/client.py", - "google/cloud/bigquery_storage_v1/services/big_query_read/transports/__init__.py", - "google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py", - "google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py", - "google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py", - "google/cloud/bigquery_storage_v1/types/__init__.py", - "google/cloud/bigquery_storage_v1/types/arrow.py", - "google/cloud/bigquery_storage_v1/types/avro.py", - "google/cloud/bigquery_storage_v1/types/storage.py", - "google/cloud/bigquery_storage_v1/types/stream.py", - "google/cloud/bigquery_storage_v1beta2/gapic_metadata.json", - "google/cloud/bigquery_storage_v1beta2/proto/arrow.proto", - "google/cloud/bigquery_storage_v1beta2/proto/avro.proto", - "google/cloud/bigquery_storage_v1beta2/proto/protobuf.proto", - "google/cloud/bigquery_storage_v1beta2/proto/storage.proto", - "google/cloud/bigquery_storage_v1beta2/proto/stream.proto", - "google/cloud/bigquery_storage_v1beta2/proto/table.proto", - "google/cloud/bigquery_storage_v1beta2/py.typed", - "google/cloud/bigquery_storage_v1beta2/services/__init__.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_read/__init__.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/__init__.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_write/__init__.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_write/async_client.py", - 
"google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/__init__.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc.py", - "google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py", - "google/cloud/bigquery_storage_v1beta2/types/__init__.py", - "google/cloud/bigquery_storage_v1beta2/types/arrow.py", - "google/cloud/bigquery_storage_v1beta2/types/avro.py", - "google/cloud/bigquery_storage_v1beta2/types/protobuf.py", - "google/cloud/bigquery_storage_v1beta2/types/storage.py", - "google/cloud/bigquery_storage_v1beta2/types/stream.py", - "google/cloud/bigquery_storage_v1beta2/types/table.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/quickstart/noxfile.py", - "samples/to_dataframe/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/fixup_bigquery_storage_v1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/__init__.py", - "tests/unit/__init__.py", - "tests/unit/gapic/__init__.py", - "tests/unit/gapic/bigquery_storage_v1/__init__.py", - "tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py", - "tests/unit/gapic/bigquery_storage_v1beta2/__init__.py", - "tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py", - "tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py" - ] -} \ No newline at end of file diff --git a/synth.py b/synth.py deleted file mode 100644 index 851d3a15..00000000 --- a/synth.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import re - -import synthtool as s -from synthtool import gcp -from synthtool.languages import python - -gapic = gcp.GAPICBazel() -common = gcp.CommonTemplates() -versions = ["v1beta2", "v1"] - -for version in versions: - library = gapic.py_library( - service="bigquery_storage", - version=version, - bazel_target=f"//google/cloud/bigquery/storage/{version}:bigquery-storage-{version}-py", - include_protos=True, - ) - - s.move( - library, - excludes=[ - "bigquery-storage-*-py.tar.gz", - "docs/conf.py", - "docs/index.rst", - f"google/cloud/bigquery_storage_{version}/__init__.py", - # v1beta2 was first generated after the microgenerator migration. - "scripts/fixup_bigquery_storage_v1beta2_keywords.py", - "README.rst", - "nox*.py", - "setup.py", - "setup.cfg", - ], - ) - - # We need to parameterize aspects of the client as it varies in different versions. 
-    #
-    # In the future once the read and write client are colocated in the same version,
-    # we'll need to loop through through multiple clients. Perhaps by the time that
-    # happens we'll be on a generator that needs less post-generation modifications.
-    clientinfo = {
-        "file": "big_query_storage_client.py",
-        "type": "storage",
-        "name": "BigQueryStorageClient",
-        "badpkg": "google-cloud-bigquerystorage",
-        "goodpkg": "google-cloud-bigquery-storage",
-    }
-    if version in {"v1"}:
-        clientinfo = {
-            "file": "big_query_read_client.py",
-            "type": "read",
-            "name": "BigQueryReadClient",
-            "badpkg": "google-cloud-bigquerystorage",
-            "goodpkg": "google-cloud-bigquery-storage",
-        }
-
-# ----------------------------------------------------------------------------
-# Add templated files
-# ----------------------------------------------------------------------------
-extras = ["fastavro", "pandas", "pyarrow"]
-
-templated_files = common.py_library(
-    microgenerator=True,
-    samples=True,
-    unit_test_extras=extras,
-    system_test_extras=extras,
-    system_test_external_dependencies=["google-cloud-bigquery"],
-    cov_level=95,
-)
-s.move(
-    templated_files, excludes=[".coveragerc"]
-)  # microgenerator has a good .coveragerc file
-
-
-# ----------------------------------------------------------------------------
-# Samples templates
-# ----------------------------------------------------------------------------
-
-python.py_samples(skip_readmes=True)
-
-# We don't want the generated client to be accessible through
-# "google.cloud.bigquery_storage", replace it with the hand written client that
-# wraps it.
-s.replace(
-    "google/cloud/bigquery_storage/__init__.py",
-    r"from google\.cloud\.bigquery_storage_v1\.services.big_query_read.client import",
-    "from google.cloud.bigquery_storage_v1 import",
-)
-
-# We also don't want to expose the async client just yet, at least not until
-# it is wrapped in its own manual client class.
-s.replace(
-    "google/cloud/bigquery_storage/__init__.py",
-    (
-        r"from google\.cloud\.bigquery_storage_v1\.services.big_query_read.async_client "
-        r"import BigQueryReadAsyncClient\n"
-    ),
-    "",
-)
-s.replace(
-    "google/cloud/bigquery_storage/__init__.py",
-    r"""["']BigQueryReadAsyncClient["'],\n""",
-    "",
-)
-
-# We want types and __version__ to be accessible through the "main" library
-# entry point.
-s.replace(
-    "google/cloud/bigquery_storage/__init__.py",
-    r"from google\.cloud\.bigquery_storage_v1\.types\.arrow import ArrowRecordBatch",
-    (
-        "from google.cloud.bigquery_storage_v1 import types\n"
-        "from google.cloud.bigquery_storage_v1 import __version__\n"
-        "\g<0>"
-    ),
-)
-s.replace(
-    "google/cloud/bigquery_storage/__init__.py",
-    r"""["']ArrowRecordBatch["']""",
-    ('"__version__",\n' '    "types",\n' "    \g<0>"),
-)
-
-# We want to expose all types through "google.cloud.bigquery_storage.types",
-# not just the types generated for the BQ Storage library. For example, we also
-# want to include common proto types such as Timestamp.
-s.replace(
-    "google/cloud/bigquery_storage/__init__.py",
-    r"import types",
-    "import gapic_types as types",
-)
-
-# The DataFormat enum is not exposed in bigquery_storage_v1/types, add it there.
-s.replace(
-    "google/cloud/bigquery_storage_v1*/types/__init__.py",
-    r"from \.stream import \(",
-    "\g<0>\n    DataFormat,",
-)
-s.replace(
-    "google/cloud/bigquery_storage_v1*/types/__init__.py",
-    r"""["']ReadSession["']""",
-    '"DataFormat",\n    \g<0>',
-)
-
-# The append_rows method doesn't contain keyword arguments that build request
-# objects, so flattened tests are not needed and break with TypeError.
-s.replace(
-    'tests/unit/gapic/bigquery_storage_v1*/test_big_query_write.py',
-    r"(@[a-z.()\n]*\n)?(async )?"
-    r"def test_append_rows_flattened[_a-z]*\(\):\n"
-    r"( {4}.*|\n)+",
-    '\n',
-)
-
-# TODO(busunkim): Use latest sphinx after microgenerator transition
-s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"')
-
-s.shell.run(["nox", "-s", "blacken"], hide_output=False)
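Note on how the pieces above fit together (not part of the patch): Owl Bot copies generator output into this repo according to deep-copy-regex, one staging directory per API version under owl-bot-staging/; owlbot.py then walks those directories via s.get_staging_dirs(...), applies its s.replace fix-ups, moves the results with s.move, and deletes the staging area with s.remove_staging_dirs(). A minimal sketch of the path mapping, with Owl Bot's $1/$2 references rewritten as \1/\2 for Python's re module (the sample path is hypothetical but follows the *-py layout the regex expects):

    import re

    # deep-copy-regex from .github/.OwlBot.yaml above.
    source = re.compile(r"/google/cloud/bigquery/storage/(v.*)/.*-py/(.*)")
    dest = r"/owl-bot-staging/\1/\2"

    generated = (
        "/google/cloud/bigquery/storage/v1/bigquery-storage-v1-py"
        "/google/cloud/bigquery_storage_v1/types/arrow.py"
    )
    print(source.sub(dest, generated))
    # -> /owl-bot-staging/v1/google/cloud/bigquery_storage_v1/types/arrow.py

deep-remove-regex lets Owl Bot clear /owl-bot-staging before each copy, while deep-preserve-regex shields the v1alpha2 and v1beta1 directories from that cleanup.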