From bfd7eab2bc29eebfc38fcf0710d0f593c59cf12b Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 9 Apr 2024 11:53:52 -0500 Subject: [PATCH 01/20] chore: synchronize with Python templates --- .repo-metadata.json | 16 +++---- owlbot.py | 108 ++++++++++++-------------------------------- 2 files changed, 36 insertions(+), 88 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index d1be7ec..ccfd06d 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -1,17 +1,17 @@ { - "name": "bigquery", - "name_pretty": "Google Cloud BigQuery", + "name": "bigquery-magics", + "name_pretty": "Google BigQuery connector for Jupyter and IPython", "product_documentation": "https://cloud.google.com/bigquery", - "client_documentation": "https://cloud.google.com/python/docs/reference/bigquery/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", + "client_documentation": "https://googleapis.dev/python/bigquery-magics/latest/", + "issue_tracker": "https://github.com/googleapis/python-bigquery-magics/issues", "release_level": "stable", "language": "python", - "library_type": "GAPIC_COMBO", - "repo": "googleapis/python-bigquery", - "distribution_name": "google-cloud-bigquery", + "library_type": "INTEGRATION", + "repo": "googleapis/python-bigquery-magics", + "distribution_name": "bigquery-magics", "api_id": "bigquery.googleapis.com", "requires_billing": false, - "default_version": "v2", + "default_version": "", "codeowner_team": "@googleapis/api-bigquery", "api_shortname": "bigquery", "api_description": "is a fully managed, NoOps, low cost data analytics service.\nData can be streamed into BigQuery at millions of rows per second to enable real-time analysis.\nWith BigQuery you can easily deploy Petabyte-scale Databases." diff --git a/owlbot.py b/owlbot.py index c2de310..9de031d 100644 --- a/owlbot.py +++ b/owlbot.py @@ -1,4 +1,4 @@ -# Copyright 2018 Google LLC +# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,116 +13,64 @@ # limitations under the License. """This script is used to synthesize generated parts of this library.""" -from pathlib import Path -import textwrap + +import pathlib import synthtool as s from synthtool import gcp from synthtool.languages import python -REPO_ROOT = Path(__file__).parent.absolute() - -default_version = "v2" - -for library in s.get_staging_dirs(default_version): - # Avoid breaking change due to change in field renames. - # https://github.com/googleapis/python-bigquery/issues/319 - s.replace( - library / f"google/cloud/bigquery_{library.name}/types/standard_sql.py", - r"type_ ", - "type ", - ) - # Patch docs issue - s.replace( - library / f"google/cloud/bigquery_{library.name}/types/model.py", - r"""\"predicted_\"""", - """`predicted_`""", - ) - s.move(library / f"google/cloud/bigquery_{library.name}/types") -s.remove_staging_dirs() +REPO_ROOT = pathlib.Path(__file__).parent.absolute() common = gcp.CommonTemplates() # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- + +extras_by_python = { + # Use a middle version of Python to test when no extras are installed. 
+ "3.9": [] +} +extras = ["tqdm"] templated_files = common.py_library( - cov_level=100, - samples=True, - microgenerator=True, - split_system_tests=True, + unit_test_python_versions=["3.8", "3.9", "3.10", "3.11", "3.12"], + system_test_python_versions=["3.8", "3.9", "3.10", "3.11", "3.12"], + cov_level=96, + unit_test_external_dependencies=["freezegun"], + unit_test_extras=extras, + unit_test_extras_by_python=extras_by_python, + system_test_extras=extras, intersphinx_dependencies={ - "dateutil": "https://dateutil.readthedocs.io/en/latest/", - "geopandas": "https://geopandas.org/", "pandas": "https://pandas.pydata.org/pandas-docs/stable/", + "pydata-google-auth": "https://pydata-google-auth.readthedocs.io/en/latest/", }, ) - -# BigQuery has a custom multiprocessing note s.move( templated_files, excludes=[ - "noxfile.py", + # Multi-processing note isn't relevant, as bigquery-magics is responsible for + # creating clients, not the end user. "docs/multiprocessing.rst", - "docs/index.rst", - ".coveragerc", - ".github/CODEOWNERS", - # Include custom SNIPPETS_TESTS job for performance. - # https://github.com/googleapis/python-bigquery/issues/191 - ".kokoro/presubmit/presubmit.cfg", - ".github/workflows", # exclude gh actions as credentials are needed for tests - "README.rst", + "README.rst", ], ) -python.configure_previous_major_version_branches() # ---------------------------------------------------------------------------- -# Samples templates +# Fixup files # ---------------------------------------------------------------------------- -python.py_samples() - -s.replace( - "docs/conf.py", - r'\{"members": True\}', - '{"members": True, "inherited-members": True}', -) -s.replace( - "docs/conf.py", - r"exclude_patterns = \[", - '\\g<0>\n "google/cloud/bigquery_v2/**", # Legacy proto-based types.', -) # ---------------------------------------------------------------------------- -# pytype-related changes +# Samples templates # ---------------------------------------------------------------------------- -# Add .pytype to .gitignore -s.replace(".gitignore", r"\.pytest_cache", "\\g<0>\n.pytype") - -# Add pytype config to setup.cfg -s.replace( - "setup.cfg", - r"universal = 1", - textwrap.dedent( - """ \\g<0> +python.py_samples(skip_readmes=True) - [pytype] - python_version = 3.8 - inputs = - google/cloud/ - exclude = - tests/ - google/cloud/bigquery_v2/ # Legacy proto-based types. - output = .pytype/ - disable = - # There's some issue with finding some pyi files, thus disabling. - # The issue https://github.com/google/pytype/issues/150 is closed, but the - # error still occurs for some reason. 
- pyi-error""" - ), -) +# ---------------------------------------------------------------------------- +# Final cleanup +# ---------------------------------------------------------------------------- s.shell.run(["nox", "-s", "blacken"], hide_output=False) for noxfile in REPO_ROOT.glob("samples/**/noxfile.py"): - s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) + s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) \ No newline at end of file From 7a71d71acd73dcc4ac0250384392739a0cf9ad4a Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 9 Apr 2024 13:04:57 -0500 Subject: [PATCH 02/20] add owlbot configuration --- .github/.OwlBot.lock.yaml | 17 +++++++++++++++++ .github/.OwlBot.yaml | 18 ++++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 .github/.OwlBot.lock.yaml create mode 100644 .github/.OwlBot.yaml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 0000000..664f92b --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,17 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 +# created: 2024-04-05T19:51:26.466869535Z \ No newline at end of file diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 0000000..1d3c03c --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + +begin-after-commit-hash: 1afeb53252641dc35a421fa5acc59e2f3229ad6d From 216de8241c8b3f755223194e4a5b3fad211fea0a Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 9 Apr 2024 14:21:21 -0500 Subject: [PATCH 03/20] remove unknown paths from linter --- noxfile.py | 5 +---- samples/snippets/.gitignore | 0 tests/.gitignore | 0 3 files changed, 1 insertion(+), 4 deletions(-) create mode 100644 samples/snippets/.gitignore create mode 100644 tests/.gitignore diff --git a/noxfile.py b/noxfile.py index ae02223..31824c9 100644 --- a/noxfile.py +++ b/noxfile.py @@ -26,11 +26,8 @@ PYTYPE_VERSION = "pytype==2021.4.9" BLACK_VERSION = "black==23.7.0" BLACK_PATHS = ( - "benchmark", "docs", - "google", - "samples", - "samples/tests", + "bigquery_magics", "tests", "noxfile.py", "setup.py", diff --git a/samples/snippets/.gitignore b/samples/snippets/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/tests/.gitignore b/tests/.gitignore new file mode 100644 index 0000000..e69de29 From 9d63abb5e5b1d11ca3fee1186d9f1ffbc1a2ea98 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 9 Apr 2024 14:29:42 -0500 Subject: [PATCH 04/20] use templated noxfile --- owlbot.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/owlbot.py b/owlbot.py index 9de031d..4d7f873 100644 --- a/owlbot.py +++ b/owlbot.py @@ -60,6 +60,20 @@ # Fixup files # ---------------------------------------------------------------------------- +s.replace( + ["noxfile.py"], r"[\"']google[\"']", '"bigquery_magics"', +) + + +s.replace( + ["noxfile.py"], "--cov=google", "--cov=bigquery_magics", +) + + +# Workaround for https://github.com/googleapis/synthtool/issues/1317 +s.replace( + ["noxfile.py"], r'extras = "\[\]"', 'extras = ""', +) # ---------------------------------------------------------------------------- # Samples templates From 1191a22d34483974a21f49e7aa3cdc92d6dabf15 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 9 Apr 2024 19:31:29 +0000 Subject: [PATCH 05/20] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .coveragerc | 33 +- .github/CODEOWNERS | 12 + .github/CONTRIBUTING.md | 28 + .github/ISSUE_TEMPLATE/bug_report.md | 43 ++ .github/ISSUE_TEMPLATE/feature_request.md | 18 + .github/ISSUE_TEMPLATE/support_request.md | 7 + .github/PULL_REQUEST_TEMPLATE.md | 7 + .github/auto-approve.yml | 3 + .github/auto-label.yaml | 20 + .github/blunderbuss.yml | 17 + .github/header-checker-lint.yml | 15 + .github/release-please.yml | 2 + .github/release-trigger.yml | 1 + .github/snippet-bot.yml | 0 .github/workflows/docs.yml | 38 ++ .github/workflows/lint.yml | 25 + .github/workflows/unittest.yml | 57 ++ .gitignore | 1 - .kokoro/build.sh | 9 +- .kokoro/continuous/common.cfg | 4 +- .kokoro/docker/docs/Dockerfile | 4 + .kokoro/docker/docs/requirements.in | 1 + .kokoro/docker/docs/requirements.txt | 38 ++ .kokoro/docs/common.cfg | 9 +- .kokoro/docs/docs-presubmit.cfg | 2 +- .kokoro/presubmit/common.cfg | 4 +- .kokoro/presubmit/presubmit.cfg | 12 +- .kokoro/release.sh | 4 +- .kokoro/release/common.cfg | 8 +- .kokoro/requirements.in | 3 +- .kokoro/requirements.txt | 105 ++-- .kokoro/samples/lint/common.cfg | 4 +- .kokoro/samples/python3.10/common.cfg | 4 +- .kokoro/samples/python3.10/periodic-head.cfg | 2 +- 
.kokoro/samples/python3.11/common.cfg | 4 +- .kokoro/samples/python3.11/periodic-head.cfg | 2 +- .kokoro/samples/python3.12/common.cfg | 4 +- .kokoro/samples/python3.12/periodic-head.cfg | 2 +- .kokoro/samples/python3.7/common.cfg | 4 +- .kokoro/samples/python3.7/periodic-head.cfg | 2 +- .kokoro/samples/python3.8/common.cfg | 4 +- .kokoro/samples/python3.8/periodic-head.cfg | 2 +- .kokoro/samples/python3.9/common.cfg | 4 +- .kokoro/samples/python3.9/periodic-head.cfg | 2 +- CONTRIBUTING.rst | 36 +- docs/conf.py | 35 +- noxfile.py | 595 ++++++++---------- scripts/decrypt-secrets.sh | 46 ++ scripts/readme-gen/readme_gen.py | 69 ++ scripts/readme-gen/templates/README.tmpl.rst | 87 +++ scripts/readme-gen/templates/auth.tmpl.rst | 9 + .../templates/auth_api_key.tmpl.rst | 14 + .../templates/install_deps.tmpl.rst | 29 + .../templates/install_portaudio.tmpl.rst | 35 ++ setup.cfg | 14 - 55 files changed, 1059 insertions(+), 480 deletions(-) create mode 100644 .github/CODEOWNERS create mode 100644 .github/CONTRIBUTING.md create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/support_request.md create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 .github/auto-approve.yml create mode 100644 .github/auto-label.yaml create mode 100644 .github/blunderbuss.yml create mode 100644 .github/header-checker-lint.yml create mode 100644 .github/release-please.yml create mode 100644 .github/release-trigger.yml create mode 100644 .github/snippet-bot.yml create mode 100644 .github/workflows/docs.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/unittest.yml create mode 100644 .kokoro/docker/docs/requirements.in create mode 100644 .kokoro/docker/docs/requirements.txt create mode 100755 scripts/decrypt-secrets.sh create mode 100644 scripts/readme-gen/readme_gen.py create mode 100644 scripts/readme-gen/templates/README.tmpl.rst create mode 100644 scripts/readme-gen/templates/auth.tmpl.rst create mode 100644 scripts/readme-gen/templates/auth_api_key.tmpl.rst create mode 100644 scripts/readme-gen/templates/install_deps.tmpl.rst create mode 100644 scripts/readme-gen/templates/install_portaudio.tmpl.rst diff --git a/.coveragerc b/.coveragerc index 0409225..6948e4d 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,14 +1,39 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! [run] branch = True +omit = + google/__init__.py + google/cloud/__init__.py [report] -fail_under = 100 +fail_under = 96 show_missing = True -omit = - google/cloud/bigquery/__init__.py - google/cloud/bigquery_v2/* # Legacy proto-based types. 
exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */core/*.py + */site-packages/*.py + google/cloud/__init__.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..193b436 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,12 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. + +# @googleapis/yoshi-python @googleapis/api-bigquery are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-bigquery + +# @googleapis/python-samples-reviewers @googleapis/api-bigquery are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-bigquery diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 0000000..939e534 --- /dev/null +++ b/.github/CONTRIBUTING.md @@ -0,0 +1,28 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Code reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more +information on using pull requests. + +## Community Guidelines + +This project follows [Google's Open Source Community +Guidelines](https://opensource.google.com/conduct/). diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..681739f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,43 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + +Please run down the following list and make sure you've tried the usual "quick fixes": + + - Search the issues already opened: https://github.com/googleapis/python-bigquery-magics/issues + - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python + +If you are still having issues, please be sure to include as much information as possible: + +#### Environment details + + - OS type and version: + - Python version: `python --version` + - pip version: `pip --version` + - `bigquery-magics` version: `pip show bigquery-magics` + +#### Steps to reproduce + + 1. ? + 2. ? 
+ +#### Code example + +```python +# example +``` + +#### Stack trace +``` +# example +``` + +Making sure to follow these steps will guarantee the quickest resolution possible. + +Thanks! diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..6365857 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this library + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + + **Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + **Describe the solution you'd like** +A clear and concise description of what you want to happen. + **Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + **Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 0000000..9958690 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..a576c2e --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-bigquery-magics/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml new file mode 100644 index 0000000..311ebbb --- /dev/null +++ b/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml new file mode 100644 index 0000000..8b37ee8 --- /dev/null +++ b/.github/auto-label.yaml @@ -0,0 +1,20 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +requestsize: + enabled: true + +path: + pullrequest: true + paths: + samples: "samples" diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml new file mode 100644 index 0000000..5b7383d --- /dev/null +++ b/.github/blunderbuss.yml @@ -0,0 +1,17 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. +assign_issues: + - googleapis/api-bigquery + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/api-bigquery + +assign_prs: + - googleapis/api-bigquery diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 0000000..6fe78aa --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.github/release-please.yml b/.github/release-please.yml new file mode 100644 index 0000000..466597e --- /dev/null +++ b/.github/release-please.yml @@ -0,0 +1,2 @@ +releaseType: python +handleGHRelease: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 0000000..d4ca941 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml new file mode 100644 index 0000000..e69de29 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..698fbc5 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.9" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..4866193 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.8" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 
0000000..3c11914 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.8', '3.9', '3.10', '3.11', '3.12'] + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v4 + with: + name: coverage-artifact-${{ matrix.python }} + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.8" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: Download coverage results + uses: actions/download-artifact@v4 + with: + path: .coverage-results/ + - name: Report coverage results + run: | + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* + coverage report --show-missing --fail-under=96 diff --git a/.gitignore b/.gitignore index 168b201..d083ea1 100644 --- a/.gitignore +++ b/.gitignore @@ -29,7 +29,6 @@ pip-log.txt .nox .cache .pytest_cache -.pytype # Mac diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 0cb0d0d..bc5509a 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -16,7 +16,7 @@ set -eo pipefail if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-bigquery" + PROJECT_ROOT="github/python-bigquery-magics" fi cd "${PROJECT_ROOT}" @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg index 1f46f62..bdccd73 100644 --- a/.kokoro/continuous/common.cfg +++ b/.kokoro/continuous/common.cfg @@ -14,7 +14,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline.sh" +build_file: "python-bigquery-magics/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -23,5 +23,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/build.sh" + value: "github/python-bigquery-magics/.kokoro/build.sh" } diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 8e39a2c..bdaf39f 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in new file mode 100644 index 0000000..816817c --- /dev/null +++ b/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt new file mode 100644 index 0000000..0e5d70f --- /dev/null +++ b/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index 41b86fc..dc9cdf0 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -20,7 +20,7 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/publish-docs.sh" + value: "github/python-bigquery-magics/.kokoro/publish-docs.sh" } env_vars: { @@ -30,8 +30,9 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2" + # Push non-cloud library docs to `docs-staging-v2-staging` instead of the + # Cloud RAD bucket `docs-staging-v2` + value: "docs-staging-v2-staging" } # It will upload the docker image after successful builds. diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 08adb2e..7fd6950 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -18,7 +18,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/build.sh" + value: "github/python-bigquery-magics/.kokoro/build.sh" } # Only run this nox session. diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg index 1f46f62..bdccd73 100644 --- a/.kokoro/presubmit/common.cfg +++ b/.kokoro/presubmit/common.cfg @@ -14,7 +14,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline.sh" +build_file: "python-bigquery-magics/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. env_vars: { @@ -23,5 +23,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/build.sh" + value: "github/python-bigquery-magics/.kokoro/build.sh" } diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg index 17d071c..8f43917 100644 --- a/.kokoro/presubmit/presubmit.cfg +++ b/.kokoro/presubmit/presubmit.cfg @@ -1,11 +1 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Disable system tests. -env_vars: { - key: "RUN_SYSTEM_TESTS" - value: "false" -} -env_vars: { - key: "RUN_SNIPPETS_TESTS" - value: "false" -} +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 078fc1c..9703b70 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -16,7 +16,7 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install --require-hashes -r github/python-bigquery/.kokoro/requirements.txt +python3 -m pip install --require-hashes -r github/python-bigquery-magics/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script # Disable buffering, so that the logs stream through. @@ -24,6 +24,6 @@ export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") -cd github/python-bigquery +cd github/python-bigquery-magics python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index cb8bbaa..58691a1 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery/.kokoro/trampoline.sh" +build_file: "python-bigquery-magics/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. env_vars: { @@ -20,7 +20,7 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/release.sh" + value: "github/python-bigquery-magics/.kokoro/release.sh" } # Fetch PyPI password @@ -43,7 +43,7 @@ env_vars: { # what we published, which we can use to generate SBOMs and attestations. action { define_artifacts { - regex: "github/python-bigquery/**/*.tar.gz" - strip_prefix: "github/python-bigquery" + regex: "github/python-bigquery-magics/**/*.tar.gz" + strip_prefix: "github/python-bigquery-magics" } } diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index ec867d9..fff4d9c 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index bb3d6ca..dd61f5f 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,31 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + 
--hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -136,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -383,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ 
--hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -509,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg index 153746c..8a0c6f4 
100644 --- a/.kokoro/samples/lint/common.cfg +++ b/.kokoro/samples/lint/common.cfg @@ -15,7 +15,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg index da4003d..c6437eb 100644 --- a/.kokoro/samples/python3.10/common.cfg +++ b/.kokoro/samples/python3.10/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.10/periodic-head.cfg +++ b/.kokoro/samples/python3.10/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg index f5adc87..3162e80 100644 --- a/.kokoro/samples/python3.11/common.cfg +++ b/.kokoro/samples/python3.11/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.11/periodic-head.cfg +++ b/.kokoro/samples/python3.11/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg index 6eb699e..08b8a66 100644 --- a/.kokoro/samples/python3.12/common.cfg +++ b/.kokoro/samples/python3.12/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.12/periodic-head.cfg +++ b/.kokoro/samples/python3.12/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index d30dc60..bcdc172 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.7/periodic-head.cfg +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index 46759c6..dfa6328 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.8/periodic-head.cfg +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg index 58d56ce..a4cb02c 100644 --- a/.kokoro/samples/python3.9/common.cfg +++ b/.kokoro/samples/python3.9/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.9/periodic-head.cfg +++ b/.kokoro/samples/python3.9/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7be61e6..e62de39 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. 
+ 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -35,21 +35,21 @@ Using a Development Checkout You'll have to create a development environment using a Git checkout: - While logged into your GitHub account, navigate to the - ``python-bigquery`` `repo`_ on GitHub. + ``python-bigquery-magics`` `repo`_ on GitHub. -- Fork and clone the ``python-bigquery`` repository to your GitHub account by +- Fork and clone the ``python-bigquery-magics`` repository to your GitHub account by clicking the "Fork" button. -- Clone your fork of ``python-bigquery`` from your GitHub account to your local +- Clone your fork of ``python-bigquery-magics`` from your GitHub account to your local computer, substituting your account username and specifying the destination - as ``hack-on-python-bigquery``. E.g.:: + as ``hack-on-python-bigquery-magics``. E.g.:: $ cd ${HOME} - $ git clone git@github.com:USERNAME/python-bigquery.git hack-on-python-bigquery - $ cd hack-on-python-bigquery - # Configure remotes such that you can pull changes from the googleapis/python-bigquery + $ git clone git@github.com:USERNAME/python-bigquery-magics.git hack-on-python-bigquery-magics + $ cd hack-on-python-bigquery-magics + # Configure remotes such that you can pull changes from the googleapis/python-bigquery-magics # repository into your local repository. - $ git remote add upstream git@github.com:googleapis/python-bigquery.git + $ git remote add upstream git@github.com:googleapis/python-bigquery-magics.git # fetch and merge changes from upstream into main $ git fetch upstream $ git merge upstream/main @@ -60,7 +60,7 @@ repo, from which you can submit a pull request. To work on the codebase and run the tests, we recommend using ``nox``, but you can also use a ``virtualenv`` of your own creation. -.. _repo: https://github.com/googleapis/python-bigquery +.. _repo: https://github.com/googleapis/python-bigquery-magics Using ``nox`` ============= @@ -113,7 +113,7 @@ Coding Style export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date - version of ``python-bigquery``. The + version of ``python-bigquery-magics``. The remote name ``upstream`` should point to the official ``googleapis`` checkout and the branch should be the default branch on that remote (``main``). @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system-3.8 -- -k + $ nox -s system-3.12 -- -k .. note:: - System tests are only configured to run under Python 3.8. + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local @@ -209,10 +209,10 @@ The `description on PyPI`_ for the project comes directly from the ``README``. Due to the reStructuredText (``rst``) parser used by PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` instead of -``https://github.com/googleapis/python-bigquery/blob/main/CONTRIBUTING.rst``) +``https://github.com/googleapis/python-bigquery-magics/blob/main/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. -.. _description on PyPI: https://pypi.org/project/google-cloud-bigquery +.. 
_description on PyPI: https://pypi.org/project/bigquery-magics ************************* @@ -221,14 +221,12 @@ Supported Python Versions We support: -- `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ - `Python 3.12`_ -.. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ @@ -238,10 +236,10 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/python-bigquery/blob/main/noxfile.py +.. _config: https://github.com/googleapis/python-bigquery-magics/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.7. +We also explicitly decided to support Python 3 beginning with version 3.8. Reasons for this include: - Encouraging use of newest versions of Python 3 diff --git a/docs/conf.py b/docs/conf.py index d0468e2..371bcff 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# google-cloud-bigquery documentation build configuration file +# bigquery-magics documentation build configuration file # # This file is execfile()d with the current directory set to its # containing dir. @@ -61,7 +61,7 @@ # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_options = {"members": True, "inherited-members": True} +autodoc_default_options = {"members": True} autosummary_generate = True @@ -80,7 +80,7 @@ root_doc = "index" # General information about the project. -project = "google-cloud-bigquery" +project = "bigquery-magics" copyright = "2019, Google" author = "Google APIs" @@ -109,7 +109,6 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [ - "google/cloud/bigquery_v2/**", # Legacy proto-based types. "_build", "**/.nox/**/*", "samples/AUTHORING_GUIDE.md", @@ -155,9 +154,9 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-bigquery", + "description": "Google Cloud Client Libraries for bigquery-magics", "github_user": "googleapis", - "github_repo": "python-bigquery", + "github_repo": "python-bigquery-magics", "github_banner": True, "font_family": "'Roboto', Georgia, sans", "head_font_family": "'Roboto', Georgia, serif", @@ -249,7 +248,7 @@ # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. 
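Stepping back to the ``autodoc_default_options`` change above: dropping
``"inherited-members"`` means Sphinx now documents only the attributes defined
directly on each class, not those picked up from base classes. A toy
illustration (the class names are hypothetical, not from this library)::

    class BaseMagics:
        def ping(self):
            """Only defined on the base class."""

    class BigQueryMagics(BaseMagics):
        def bigquery(self):
            """Defined directly on the subclass."""

With ``{"members": True}`` alone, autodoc renders ``BigQueryMagics.bigquery``
but omits the inherited ``ping``; restoring ``"inherited-members": True``
would pull ``ping`` into the generated page as well.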
-htmlhelp_basename = "google-cloud-bigquery-doc" +htmlhelp_basename = "bigquery-magics-doc" # -- Options for warnings ------------------------------------------------------ @@ -282,8 +281,8 @@ latex_documents = [ ( root_doc, - "google-cloud-bigquery.tex", - "google-cloud-bigquery Documentation", + "bigquery-magics.tex", + "bigquery-magics Documentation", author, "manual", ) @@ -317,8 +316,8 @@ man_pages = [ ( root_doc, - "google-cloud-bigquery", - "google-cloud-bigquery Documentation", + "bigquery-magics", + "bigquery-magics Documentation", [author], 1, ) @@ -336,11 +335,11 @@ texinfo_documents = [ ( root_doc, - "google-cloud-bigquery", - "google-cloud-bigquery Documentation", + "bigquery-magics", + "bigquery-magics Documentation", author, - "google-cloud-bigquery", - "google-cloud-bigquery Library", + "bigquery-magics", + "bigquery-magics Library", "APIs", ) ] @@ -369,9 +368,11 @@ "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), - "dateutil": ("https://dateutil.readthedocs.io/en/latest/", None), - "geopandas": ("https://geopandas.org/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), + "pydata-google-auth": ( + "https://pydata-google-auth.readthedocs.io/en/latest/", + None, + ), } diff --git a/noxfile.py b/noxfile.py index 31824c9..eb76782 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,10 +1,12 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,85 +14,173 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Generated by synthtool. DO NOT EDIT! 
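The ``intersphinx_mapping`` entries added in ``docs/conf.py`` above let
cross-references resolve against external documentation inventories, so a
docstring can link straight into the pandas docs. A schematic example (the
function is made up for illustration, not this library's API)::

    def to_dataframe(rows):
        """Collect query results into a table.

        Returns:
            pandas.DataFrame: One row per query result.
        """

When Sphinx renders this, the ``pandas.DataFrame`` type reference becomes a
hyperlink into the pandas docs site named in the mapping; the
``pydata-google-auth`` entry works the same way for that package's objects.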
+ from __future__ import absolute_import -import pathlib import os +import pathlib import re import shutil +from typing import Dict, List +import warnings import nox - -MYPY_VERSION = "mypy==1.6.1" -PYTYPE_VERSION = "pytype==2021.4.9" -BLACK_VERSION = "black==23.7.0" -BLACK_PATHS = ( - "docs", - "bigquery_magics", - "tests", - "noxfile.py", - "setup.py", -) +FLAKE8_VERSION = "flake8==6.1.0" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" +LINT_PATHS = ["docs", "bigquery_magics", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.11", "3.12"] -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "freezegun", +] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [ + "tqdm", +] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = { + "3.9": [], +} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [ + "tqdm", +] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() # 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ - "unit_noextras", "unit", "system", - "snippets", "cover", "lint", "lint_setup_py", "blacken", - "mypy", - "mypy_samples", - "pytype", "docs", + "format", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True -def default(session, install_extras=True): - """Default unit test session. - This is intended to be run **without** an interpreter set, so - that the current ``python`` (on the ``PATH``) or the version of - Python corresponding to the ``nox`` binary the ``PATH`` can - run the tests. +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. """ - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + session.install(FLAKE8_VERSION, BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, ) + session.run("flake8", "bigquery_magics", "tests") - # Install all test dependencies, then install local packages in-place. - session.install( - "mock", - "pytest", - "google-cloud-testutils", - "pytest-cov", - "freezegun", - "-c", - constraints_path, + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
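+    # For example (illustrative): plain isort would emit
+    #     import shutil
+    #     import warnings
+    #     from typing import Dict, List
+    # whereas --fss interleaves ``import`` and ``from`` forms alphabetically,
+    # matching the import block at the top of this file:
+    #     import shutil
+    #     from typing import Dict, List
+    #     import warnings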
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) - if install_extras and session.python in ["3.11", "3.12"]: - install_target = ".[bqstorage,ipywidgets,pandas,tqdm,opentelemetry]" - elif install_extras: - install_target = ".[all]" + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) else: - install_target = "." - session.install("-e", install_target, "-c", constraints_path) + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( "py.test", "--quiet", - "--cov=google/cloud/bigquery", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=bigquery_magics", "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", @@ -107,169 +197,77 @@ def unit(session): default(session) -@nox.session(python=[UNIT_TEST_PYTHON_VERSIONS[0], UNIT_TEST_PYTHON_VERSIONS[-1]]) -def unit_noextras(session): - """Run the unit test suite.""" - - # Install optional dependencies that are out-of-date. - # https://github.com/googleapis/python-bigquery/issues/933 - # There is no pyarrow 1.0.0 package for Python 3.9. - if session.python == UNIT_TEST_PYTHON_VERSIONS[0]: - session.install("pyarrow==1.0.0") +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") - default(session, install_extras=False) + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) -@nox.session(python=DEFAULT_PYTHON_VERSION) -def mypy(session): - """Run type checks with mypy.""" - session.install("-e", ".[all]") - session.install(MYPY_VERSION) + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - # Just install the dependencies' type info directly, since "mypy --install-types" - # might require an additional pass. 
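The ``install_unittest_dependencies`` helper above resolves extras per
interpreter: because ``UNIT_TEST_EXTRAS_BY_PYTHON`` is non-empty, the
session's Python version is looked up in that mapping, and the
``.get(session.python, [])`` default means any version not listed falls back
to no extras at all. A minimal sketch of the resulting pip invocations for a
3.12 unit session, assuming the constants defined at the top of the file::

    $ pip install mock asyncmock pytest pytest-cov pytest-asyncio \
        -c testing/constraints-3.12.txt
    $ pip install freezegun -c testing/constraints-3.12.txt
    $ pip install -e . -c testing/constraints-3.12.txt

(The ``freezegun`` line comes from the deprecated
``UNIT_TEST_EXTERNAL_DEPENDENCIES`` hook, which still installs the package but
emits a ``DeprecationWarning`` first.)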
- session.install( - "types-protobuf", - "types-python-dateutil", - "types-requests", - "types-setuptools", - ) - session.run("mypy", "-p", "google", "--show-traceback") + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] -@nox.session(python=DEFAULT_PYTHON_VERSION) -def pytype(session): - """Run type checks with pytype.""" - # An indirect dependecy attrs==21.1.0 breaks the check, and installing a less - # recent version avoids the error until a possibly better fix is found. - # https://github.com/googleapis/python-bigquery/issues/655 - session.install("attrs==20.3.0") - session.install("-e", ".[all]") - session.install(PYTYPE_VERSION) - # See https://github.com/google/pytype/issues/464 - session.run("pytype", "-P", ".", "google/cloud/bigquery") + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" - constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") - # Sanity check: Only run system tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable.") + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1", "-c", constraints_path) - - # Install all test dependencies, then install local packages in place. - session.install( - "mock", "pytest", "psutil", "google-cloud-testutils", "-c", constraints_path - ) - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") == "true": - # mTLS test requires pyopenssl and latest google-cloud-storage - session.install("google-cloud-storage", "pyopenssl") - else: - session.install("google-cloud-storage", "-c", constraints_path) - - # Data Catalog needed for the column ACL test with a real Policy Tag. - session.install("google-cloud-datacatalog", "-c", constraints_path) - - if session.python in ["3.11", "3.12"]: - extras = "[bqstorage,ipywidgets,pandas,tqdm,opentelemetry]" - else: - extras = "[all]" - session.install("-e", f".{extras}", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. 
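In practice the gating above means the system suite can be steered entirely
from the environment, with no noxfile edits; for example (standard ``nox``
CLI, session names following ``SYSTEM_TEST_PYTHON_VERSIONS``)::

    # Skip system tests outright.
    $ RUN_SYSTEM_TESTS=false nox -s system-3.12

    # Exercise the mTLS path; this also installs pyopenssl.
    $ GOOGLE_API_USE_CLIENT_CERTIFICATE=true nox -s system-3.12

And if neither ``tests/system.py`` nor ``tests/system/`` exists, the session
skips itself with "System tests were not found".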
- session.run( - "py.test", - "--quiet", - os.path.join("tests", "system"), - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def mypy_samples(session): - """Run type checks with mypy.""" - session.install("pytest") - for requirements_path in CURRENT_DIRECTORY.glob("samples/*/requirements.txt"): - session.install("-r", str(requirements_path)) - session.install(MYPY_VERSION) - - # requirements.txt might include this package. Install from source so that - # we can author samples with unreleased features. - session.install("-e", ".[all]") - - # Just install the dependencies' type info directly, since "mypy --install-types" - # might require an additional pass. - session.install( - "types-mock", - "types-pytz", - "types-protobuf!=4.24.0.20240106", # This version causes an error: 'Module "google.oauth2" has no attribute "service_account"' - "types-python-dateutil", - "types-requests", - "types-setuptools", - ) - - session.install("typing-extensions") # for TypedDict in pre-3.8 Python versions - - session.run( - "mypy", - "--config-file", - str(CURRENT_DIRECTORY / "samples" / "mypy.ini"), - "--no-incremental", # Required by warn-unused-configs from mypy.ini to work - "samples/", - ) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def snippets(session): - """Run the snippets test suite.""" - - # Check the value of `RUN_SNIPPETS_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SNIPPETS_TESTS", "true") == "false": - session.skip("RUN_SNIPPETS_TESTS is set to false, skipping") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - - # Install all test dependencies, then install local packages in place. - session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - session.install("google-cloud-storage", "-c", constraints_path) - session.install("grpcio", "-c", constraints_path) - - if session.python in ["3.11", "3.12"]: - extras = "[bqstorage,ipywidgets,pandas,tqdm,opentelemetry]" - else: - extras = "[all]" - session.install("-e", f".{extras}", "-c", constraints_path) - - # Run py.test against the snippets tests. - # Skip tests in samples/snippets, as those are run in a different session - # using the nox config from that directory. - session.run("py.test", os.path.join("docs", "snippets.py"), *session.posargs) - session.run( - "py.test", - "samples", - "--ignore=samples/desktopapp", - "--ignore=samples/magics", - "--ignore=samples/geography", - "--ignore=samples/notebooks", - "--ignore=samples/snippets", - *session.posargs, - ) + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -280,141 +278,16 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - session.run("coverage", "erase") + session.run("coverage", "report", "--show-missing", "--fail-under=96") - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): - """Run all tests with prerelease versions of dependencies installed. 
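The ``--junitxml`` flag in the runs above writes a JUnit-style XML report
whose name embeds the interpreter, so each session leaves a distinct artifact
for CI to collect. For the 3.12 session, the run is equivalent to::

    $ py.test --quiet --junitxml=system_3.12_sponge_log.xml tests/system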
- - https://github.com/googleapis/python-bigquery/issues/95 - """ - # PyArrow prerelease packages are published to an alternative PyPI host. - # https://arrow.apache.org/docs/python/install.html#installing-nightly-packages - session.install( - "--extra-index-url", - "https://pypi.fury.io/arrow-nightlies/", - "--prefer-binary", - "--pre", - "--upgrade", - "pyarrow", - ) - session.install( - "--extra-index-url", - "https://pypi.anaconda.org/scipy-wheels-nightly/simple", - "--prefer-binary", - "--pre", - "--upgrade", - "pandas", - ) - session.install( - "--pre", - "--upgrade", - "IPython", - "ipykernel", - "ipywidgets", - "tqdm", - "git+https://github.com/pypa/packaging.git", - ) - - session.install( - "--pre", - "--upgrade", - "google-api-core", - "google-cloud-bigquery-storage", - "google-cloud-core", - "google-resumable-media", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", - ) - session.install( - "freezegun", - "google-cloud-datacatalog", - "google-cloud-storage", - "google-cloud-testutils", - "mock", - "psutil", - "pytest", - "pytest-cov", - ) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # We use --no-deps to ensure that pre-release versions aren't overwritten - # by the version ranges in setup.py. - session.install(*deps) - session.install("--no-deps", "-e", ".[all]") - - # Print out prerelease package versions. - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run("python", "-c", "import pandas; print(pandas.__version__)") - session.run("python", "-c", "import pyarrow; print(pyarrow.__version__)") - session.run("python", "-m", "pip", "freeze") - - # Run all tests, except a few samples tests which require extra dependencies. - session.run("py.test", "tests/unit") - session.run("py.test", "tests/system") - session.run("py.test", "samples/tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - - session.install("flake8", BLACK_VERSION) - session.install("-e", ".") - session.run("flake8", os.path.join("google", "cloud", "bigquery")) - session.run("flake8", "tests") - session.run("flake8", os.path.join("docs", "samples")) - session.run("flake8", os.path.join("docs", "snippets.py")) - session.run("flake8", "benchmark") - session.run("black", "--check", *BLACK_PATHS) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - - session.install("docutils", "Pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. - Format code to uniform standard. 
- """ - - session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) + session.run("coverage", "erase") @nox.session(python="3.9") def docs(session): - """Build the docs.""" + """Build the docs for this library.""" + session.install("-e", ".") session.install( # We need to pin to specific versions of the `sphinxcontrib-*` packages # which still support sphinx 4.x. @@ -429,8 +302,6 @@ def docs(session): "alabaster", "recommonmark", ) - session.install("google-cloud-storage") - session.install("-e", ".[all]") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -491,3 +362,93 @@ def docfx(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh new file mode 100755 index 0000000..0018b42 --- /dev/null +++ b/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py new file mode 100644 index 0000000..1acc119 --- /dev/null +++ b/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. 
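+    # Specifically, ``get_help`` shells out to ``python <sample> --help``,
+    # and the sample file paths in the YAML config are taken relative to the
+    # config file itself, so switch into its directory before rendering.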
+ os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 0000000..4fd2397 --- /dev/null +++ b/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 0000000..1446b94 --- /dev/null +++ b/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. 
_Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 0000000..11957ce --- /dev/null +++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 0000000..6f069c6 --- /dev/null +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 0000000..5ea33d1 --- /dev/null +++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/setup.cfg b/setup.cfg index 37b63aa..0523500 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,17 +17,3 @@ # Generated by synthtool. DO NOT EDIT! 
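In the ``setup.cfg`` hunk below, the surviving ``universal = 1`` flag asks
``bdist_wheel`` to tag builds as compatible with both Python 2 and 3, so the
artifact carries a ``py2.py3-none-any`` tag rather than a version-specific
one. For example (the version number is illustrative)::

    $ python setup.py bdist_wheel
    $ ls dist/
    bigquery_magics-0.1.0-py2.py3-none-any.whl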
[bdist_wheel] universal = 1 - -[pytype] -python_version = 3.8 -inputs = - google/cloud/ -exclude = - tests/ - google/cloud/bigquery_v2/ # Legacy proto-based types. -output = .pytype/ -disable = - # There's some issue with finding some pyi files, thus disabling. - # The issue https://github.com/google/pytype/issues/150 is closed, but the - # error still occurs for some reason. - pyi-error From 79716486ea974106e465af9a08c4edd056448595 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 9 Apr 2024 14:51:51 -0500 Subject: [PATCH 06/20] multiple teams: see: https://github.com/googleapis/synthtool/issues/946 --- .repo-metadata.json | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index ccfd06d..55ee426 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -12,7 +12,5 @@ "api_id": "bigquery.googleapis.com", "requires_billing": false, "default_version": "", - "codeowner_team": "@googleapis/api-bigquery", - "api_shortname": "bigquery", - "api_description": "is a fully managed, NoOps, low cost data analytics service.\nData can be streamed into BigQuery at millions of rows per second to enable real-time analysis.\nWith BigQuery you can easily deploy Petabyte-scale Databases." + "codeowner_team": "@googleapis/api-bigquery @googleapis/api-bigquery-dataframe" } From 71df8f3b2908d997972573c77508c35fd0369a58 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 9 Apr 2024 19:53:48 +0000 Subject: [PATCH 07/20] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .github/CODEOWNERS | 8 ++++---- .github/blunderbuss.yml | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 193b436..24c0ca9 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. -# @googleapis/yoshi-python @googleapis/api-bigquery are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/api-bigquery +# @googleapis/yoshi-python @googleapis/api-bigquery @googleapis/api-bigquery-dataframe are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-bigquery @googleapis/api-bigquery-dataframe -# @googleapis/python-samples-reviewers @googleapis/api-bigquery are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/api-bigquery +# @googleapis/python-samples-reviewers @googleapis/api-bigquery @googleapis/api-bigquery-dataframe are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-bigquery @googleapis/api-bigquery-dataframe diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml index 5b7383d..f8fe0bb 100644 --- a/.github/blunderbuss.yml +++ b/.github/blunderbuss.yml @@ -4,14 +4,14 @@ # Note: This file is autogenerated. To make changes to the assignee # team, please update `codeowner_team` in `.repo-metadata.json`. 
assign_issues: - - googleapis/api-bigquery + - googleapis/api-bigquery googleapis/api-bigquery-dataframe assign_issues_by: - labels: - "samples" to: - googleapis/python-samples-reviewers - - googleapis/api-bigquery + - googleapis/api-bigquery googleapis/api-bigquery-dataframe assign_prs: - - googleapis/api-bigquery + - googleapis/api-bigquery googleapis/api-bigquery-dataframe From d4ddffba6be79bf9943fe5c0db4ecbf0246a8bdf Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 9 Apr 2024 15:00:07 -0500 Subject: [PATCH 08/20] chore: grant api-bigquery and api-bigquery-dataframe teams write access to repo --- .github/sync-repo-settings.yaml | 42 +++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 .github/sync-repo-settings.yaml diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml new file mode 100644 index 0000000..32ad861 --- /dev/null +++ b/.github/sync-repo-settings.yaml @@ -0,0 +1,42 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings +# Rules for main branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. +# Defaults to `main` +- pattern: main + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: true + requiredStatusCheckContexts: + - 'cla/google' + - 'OwlBot Post Processor' + # TODO(tswast): enable once we have tests migrated from google-cloud-bigquery + # - 'lint' + # - 'docs' + # - 'unit (3.8)' + # - 'unit (3.9)' + # - 'unit (3.10)' + # - 'unit (3.11)' + # - 'unit (3.12)' + # - 'cover' + # - 'Kokoro' + # - 'Samples - Lint' + # - 'Samples - Python 3.8' + # - 'Samples - Python 3.9' + # - 'Samples - Python 3.10' + # - 'Samples - Python 3.11' + # - 'Samples - Python 3.12' +permissionRules: + - team: actools-python + permission: admin + - team: actools + permission: admin + - team: api-bigquery + permission: push + - team: api-bigquery-dataframe + permission: push + - team: yoshi-python + permission: push + - team: python-samples-owners + permission: push + - team: python-samples-reviewers + permission: push \ No newline at end of file From 04519fb072bb042a0c9ee2e3608a9116a7e0a873 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 10 Apr 2024 11:22:43 -0500 Subject: [PATCH 09/20] add python 3.7 --- owlbot.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/owlbot.py b/owlbot.py index 4d7f873..b274d80 100644 --- a/owlbot.py +++ b/owlbot.py @@ -30,14 +30,13 @@ extras_by_python = { # Use a middle version of Python to test when no extras are installed. 
- "3.9": [] + "3.8": [] } extras = ["tqdm"] templated_files = common.py_library( - unit_test_python_versions=["3.8", "3.9", "3.10", "3.11", "3.12"], - system_test_python_versions=["3.8", "3.9", "3.10", "3.11", "3.12"], - cov_level=96, - unit_test_external_dependencies=["freezegun"], + unit_test_python_versions=["3.7", "3.8", "3.11", "3.12"], + system_test_python_versions=["3.8", "3.12"], + cov_level=100, unit_test_extras=extras, unit_test_extras_by_python=extras_by_python, system_test_extras=extras, From f59fef71c9760b9b9be04505f7ab558aa12243e5 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 10 Apr 2024 16:24:50 +0000 Subject: [PATCH 10/20] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .coveragerc | 2 +- .github/workflows/unittest.yml | 4 ++-- CONTRIBUTING.rst | 12 +++++------- noxfile.py | 12 +++++------- 4 files changed, 13 insertions(+), 17 deletions(-) diff --git a/.coveragerc b/.coveragerc index 6948e4d..c540edf 100644 --- a/.coveragerc +++ b/.coveragerc @@ -22,7 +22,7 @@ omit = google/cloud/__init__.py [report] -fail_under = 96 +fail_under = 100 show_missing = True exclude_lines = # Re-enable the standard pragma diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 3c11914..495a7ef 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python: ['3.7', '3.8', '3.11', '3.12'] steps: - name: Checkout uses: actions/checkout@v4 @@ -54,4 +54,4 @@ jobs: run: | find .coverage-results -type f -name '*.zip' -exec unzip {} \; coverage combine .coverage-results/**/.coverage* - coverage report --show-missing --fail-under=96 + coverage report --show-missing --fail-under=100 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index e62de39..582c599 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + 3.7, 3.8, 3.11 and 3.12 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -148,7 +148,7 @@ Running System Tests .. note:: - System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + System tests are only configured to run under Python 3.8 and 3.12. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local @@ -221,15 +221,13 @@ Supported Python Versions We support: +- `Python 3.7`_ - `Python 3.8`_ -- `Python 3.9`_ -- `Python 3.10`_ - `Python 3.11`_ - `Python 3.12`_ +.. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ -.. _Python 3.9: https://docs.python.org/3.9/ -.. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ .. _Python 3.12: https://docs.python.org/3.12/ @@ -239,7 +237,7 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-bigquery-magics/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.8. 
+We also explicitly decided to support Python 3 beginning with version 3.7. Reasons for this include: - Encouraging use of newest versions of Python 3 diff --git a/noxfile.py b/noxfile.py index eb76782..08ee214 100644 --- a/noxfile.py +++ b/noxfile.py @@ -34,7 +34,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.11", "3.12"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -42,19 +42,17 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ - "freezegun", -] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] UNIT_TEST_DEPENDENCIES: List[str] = [] UNIT_TEST_EXTRAS: List[str] = [ "tqdm", ] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = { - "3.9": [], + "3.8": [], } -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.12"] SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", @@ -278,7 +276,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=96") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") From fa3ce9f9be46a8c7eed5849b1637579aa953c4b1 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 10 Apr 2024 11:05:47 -0500 Subject: [PATCH 11/20] chore: copy magics unit tests from google-cloud-bigquery --- .coveragerc | 31 +- .github/workflows/docs.yml | 38 + .github/workflows/lint.yml | 25 + .github/workflows/unittest.yml | 57 + .kokoro/build.sh | 9 +- .kokoro/continuous/common.cfg | 4 +- .kokoro/docker/docs/Dockerfile | 4 + .kokoro/docker/docs/requirements.in | 1 + .kokoro/docker/docs/requirements.txt | 38 + .kokoro/docs/common.cfg | 9 +- .kokoro/docs/docs-presubmit.cfg | 2 +- .kokoro/presubmit/common.cfg | 4 +- .kokoro/presubmit/presubmit.cfg | 12 +- .kokoro/release.sh | 4 +- .kokoro/release/common.cfg | 8 +- .kokoro/requirements.in | 3 +- .kokoro/requirements.txt | 105 +- .kokoro/samples/lint/common.cfg | 4 +- .kokoro/samples/python3.10/common.cfg | 4 +- .kokoro/samples/python3.10/periodic-head.cfg | 2 +- .kokoro/samples/python3.11/common.cfg | 4 +- .kokoro/samples/python3.11/periodic-head.cfg | 2 +- .kokoro/samples/python3.12/common.cfg | 4 +- .kokoro/samples/python3.12/periodic-head.cfg | 2 +- .kokoro/samples/python3.7/common.cfg | 4 +- .kokoro/samples/python3.7/periodic-head.cfg | 2 +- .kokoro/samples/python3.8/common.cfg | 4 +- .kokoro/samples/python3.8/periodic-head.cfg | 2 +- .kokoro/samples/python3.9/common.cfg | 4 +- .kokoro/samples/python3.9/periodic-head.cfg | 2 +- bigquery_magics/version.py | 15 + noxfile.py | 594 +++-- setup.py | 78 +- testing/constraints-3.10.txt | 3 +- testing/constraints-3.11.txt | 2 + testing/constraints-3.12.txt | 2 + testing/constraints-3.7.txt | 18 +- testing/constraints-3.8.txt | 10 +- testing/constraints-3.9.txt | 13 +- tests/system/test_bigquery.py | 82 + tests/unit/line_arg_parser/__init__.py | 13 + tests/unit/line_arg_parser/test_lexer.py | 34 + tests/unit/line_arg_parser/test_parser.py | 206 ++ tests/unit/line_arg_parser/test_visitors.py | 36 + tests/unit/test_bigquery.py | 2073 ++++++++++++++++++ 45 files changed, 3054 insertions(+), 519 deletions(-) create mode 100644 .github/workflows/docs.yml create mode 100644 .github/workflows/lint.yml create 
mode 100644 .github/workflows/unittest.yml create mode 100644 .kokoro/docker/docs/requirements.in create mode 100644 .kokoro/docker/docs/requirements.txt create mode 100644 bigquery_magics/version.py create mode 100644 tests/system/test_bigquery.py create mode 100644 tests/unit/line_arg_parser/__init__.py create mode 100644 tests/unit/line_arg_parser/test_lexer.py create mode 100644 tests/unit/line_arg_parser/test_parser.py create mode 100644 tests/unit/line_arg_parser/test_visitors.py create mode 100644 tests/unit/test_bigquery.py diff --git a/.coveragerc b/.coveragerc index 0409225..c540edf 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,14 +1,39 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! [run] branch = True +omit = + google/__init__.py + google/cloud/__init__.py [report] fail_under = 100 show_missing = True -omit = - google/cloud/bigquery/__init__.py - google/cloud/bigquery_v2/* # Legacy proto-based types. exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */core/*.py + */site-packages/*.py + google/cloud/__init__.py diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..698fbc5 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.9" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..4866193 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.8" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 0000000..495a7ef --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + 
branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.7', '3.8', '3.11', '3.12'] + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v4 + with: + name: coverage-artifact-${{ matrix.python }} + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.8" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: Download coverage results + uses: actions/download-artifact@v4 + with: + path: .coverage-results/ + - name: Report coverage results + run: | + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* + coverage report --show-missing --fail-under=100 diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 0cb0d0d..bc5509a 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -16,7 +16,7 @@ set -eo pipefail if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-bigquery" + PROJECT_ROOT="github/python-bigquery-magics" fi cd "${PROJECT_ROOT}" @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg index 1f46f62..bdccd73 100644 --- a/.kokoro/continuous/common.cfg +++ b/.kokoro/continuous/common.cfg @@ -14,7 +14,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline.sh" +build_file: "python-bigquery-magics/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -23,5 +23,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/build.sh" + value: "github/python-bigquery-magics/.kokoro/build.sh" } diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 8e39a2c..bdaf39f 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in new file mode 100644 index 0000000..816817c --- /dev/null +++ b/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt new file mode 100644 index 0000000..0e5d70f --- /dev/null +++ b/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index 41b86fc..dc9cdf0 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -20,7 +20,7 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/publish-docs.sh" + value: "github/python-bigquery-magics/.kokoro/publish-docs.sh" } env_vars: { @@ -30,8 +30,9 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2" + # Push non-cloud library docs to `docs-staging-v2-staging` instead of the + # Cloud RAD bucket `docs-staging-v2` + value: "docs-staging-v2-staging" } # It will upload the docker image after successful builds. diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 08adb2e..7fd6950 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -18,7 +18,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/build.sh" + value: "github/python-bigquery-magics/.kokoro/build.sh" } # Only run this nox session. diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg index 1f46f62..bdccd73 100644 --- a/.kokoro/presubmit/common.cfg +++ b/.kokoro/presubmit/common.cfg @@ -14,7 +14,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline.sh" +build_file: "python-bigquery-magics/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. env_vars: { @@ -23,5 +23,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/build.sh" + value: "github/python-bigquery-magics/.kokoro/build.sh" } diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg index 17d071c..8f43917 100644 --- a/.kokoro/presubmit/presubmit.cfg +++ b/.kokoro/presubmit/presubmit.cfg @@ -1,11 +1 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Disable system tests. -env_vars: { - key: "RUN_SYSTEM_TESTS" - value: "false" -} -env_vars: { - key: "RUN_SNIPPETS_TESTS" - value: "false" -} +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 078fc1c..9703b70 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -16,7 +16,7 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install --require-hashes -r github/python-bigquery/.kokoro/requirements.txt +python3 -m pip install --require-hashes -r github/python-bigquery-magics/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script # Disable buffering, so that the logs stream through. @@ -24,6 +24,6 @@ export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") -cd github/python-bigquery +cd github/python-bigquery-magics python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index cb8bbaa..58691a1 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery/.kokoro/trampoline.sh" +build_file: "python-bigquery-magics/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. env_vars: { @@ -20,7 +20,7 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/release.sh" + value: "github/python-bigquery-magics/.kokoro/release.sh" } # Fetch PyPI password @@ -43,7 +43,7 @@ env_vars: { # what we published, which we can use to generate SBOMs and attestations. action { define_artifacts { - regex: "github/python-bigquery/**/*.tar.gz" - strip_prefix: "github/python-bigquery" + regex: "github/python-bigquery-magics/**/*.tar.gz" + strip_prefix: "github/python-bigquery-magics" } } diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index ec867d9..fff4d9c 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index bb3d6ca..dd61f5f 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,31 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + 
--hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -136,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -383,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ 
--hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -509,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg index 153746c..8a0c6f4 
100644 --- a/.kokoro/samples/lint/common.cfg +++ b/.kokoro/samples/lint/common.cfg @@ -15,7 +15,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg index da4003d..c6437eb 100644 --- a/.kokoro/samples/python3.10/common.cfg +++ b/.kokoro/samples/python3.10/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.10/periodic-head.cfg +++ b/.kokoro/samples/python3.10/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg index f5adc87..3162e80 100644 --- a/.kokoro/samples/python3.11/common.cfg +++ b/.kokoro/samples/python3.11/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.11/periodic-head.cfg +++ b/.kokoro/samples/python3.11/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg index 6eb699e..08b8a66 100644 --- a/.kokoro/samples/python3.12/common.cfg +++ b/.kokoro/samples/python3.12/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.12/periodic-head.cfg +++ b/.kokoro/samples/python3.12/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index d30dc60..bcdc172 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.7/periodic-head.cfg +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index 46759c6..dfa6328 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.8/periodic-head.cfg +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg index 58d56ce..a4cb02c 100644 --- a/.kokoro/samples/python3.9/common.cfg +++ b/.kokoro/samples/python3.9/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-bigquery-magics/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg index 5aa01ba..47e8ef9 100644 --- a/.kokoro/samples/python3.9/periodic-head.cfg +++ b/.kokoro/samples/python3.9/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery/.kokoro/test-samples-against-head.sh" + value: "github/python-bigquery-magics/.kokoro/test-samples-against-head.sh" } diff --git a/bigquery_magics/version.py b/bigquery_magics/version.py new file mode 100644 index 0000000..72b0b02 --- /dev/null +++ b/bigquery_magics/version.py @@ -0,0 +1,15 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = "0.0.1" \ No newline at end of file diff --git a/noxfile.py b/noxfile.py index ae02223..08ee214 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,10 +1,12 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,88 +14,171 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Generated by synthtool. DO NOT EDIT! 
+ from __future__ import absolute_import -import pathlib import os +import pathlib import re import shutil +from typing import Dict, List +import warnings import nox - -MYPY_VERSION = "mypy==1.6.1" -PYTYPE_VERSION = "pytype==2021.4.9" -BLACK_VERSION = "black==23.7.0" -BLACK_PATHS = ( - "benchmark", - "docs", - "google", - "samples", - "samples/tests", - "tests", - "noxfile.py", - "setup.py", -) +FLAKE8_VERSION = "flake8==6.1.0" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" +LINT_PATHS = ["docs", "bigquery_magics", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.11", "3.12"] -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [ + "tqdm", +] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = { + "3.8": [], +} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [ + "tqdm", +] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() # 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ - "unit_noextras", "unit", "system", - "snippets", "cover", "lint", "lint_setup_py", "blacken", - "mypy", - "mypy_samples", - "pytype", "docs", + "format", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True -def default(session, install_extras=True): - """Default unit test session. - This is intended to be run **without** an interpreter set, so - that the current ``python`` (on the ``PATH``) or the version of - Python corresponding to the ``nox`` binary the ``PATH`` can - run the tests. +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. """ - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + session.install(FLAKE8_VERSION, BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, ) + session.run("flake8", "bigquery_magics", "tests") - # Install all test dependencies, then install local packages in-place. - session.install( - "mock", - "pytest", - "google-cloud-testutils", - "pytest-cov", - "freezegun", - "-c", - constraints_path, + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
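+    # (--fss is short for --force-sort-within-sections; run this session
+    # locally with `nox -s format`.)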
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) - if install_extras and session.python in ["3.11", "3.12"]: - install_target = ".[bqstorage,ipywidgets,pandas,tqdm,opentelemetry]" - elif install_extras: - install_target = ".[all]" + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS else: - install_target = "." - session.install("-e", install_target, "-c", constraints_path) + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( "py.test", "--quiet", - "--cov=google/cloud/bigquery", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=bigquery_magics", "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", @@ -110,169 +195,77 @@ def unit(session): default(session) -@nox.session(python=[UNIT_TEST_PYTHON_VERSIONS[0], UNIT_TEST_PYTHON_VERSIONS[-1]]) -def unit_noextras(session): - """Run the unit test suite.""" - - # Install optional dependencies that are out-of-date. - # https://github.com/googleapis/python-bigquery/issues/933 - # There is no pyarrow 1.0.0 package for Python 3.9. - if session.python == UNIT_TEST_PYTHON_VERSIONS[0]: - session.install("pyarrow==1.0.0") +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") - default(session, install_extras=False) + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) -@nox.session(python=DEFAULT_PYTHON_VERSION) -def mypy(session): - """Run type checks with mypy.""" - session.install("-e", ".[all]") - session.install(MYPY_VERSION) + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - # Just install the dependencies' type info directly, since "mypy --install-types" - # might require an additional pass. 
- session.install( - "types-protobuf", - "types-python-dateutil", - "types-requests", - "types-setuptools", - ) - session.run("mypy", "-p", "google", "--show-traceback") + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] -@nox.session(python=DEFAULT_PYTHON_VERSION) -def pytype(session): - """Run type checks with pytype.""" - # An indirect dependecy attrs==21.1.0 breaks the check, and installing a less - # recent version avoids the error until a possibly better fix is found. - # https://github.com/googleapis/python-bigquery/issues/655 - session.install("attrs==20.3.0") - session.install("-e", ".[all]") - session.install(PYTYPE_VERSION) - # See https://github.com/google/pytype/issues/464 - session.run("pytype", "-P", ".", "google/cloud/bigquery") + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" - constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") - # Sanity check: Only run system tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable.") + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1", "-c", constraints_path) - - # Install all test dependencies, then install local packages in place. - session.install( - "mock", "pytest", "psutil", "google-cloud-testutils", "-c", constraints_path - ) - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") == "true": - # mTLS test requires pyopenssl and latest google-cloud-storage - session.install("google-cloud-storage", "pyopenssl") - else: - session.install("google-cloud-storage", "-c", constraints_path) - - # Data Catalog needed for the column ACL test with a real Policy Tag. - session.install("google-cloud-datacatalog", "-c", constraints_path) - - if session.python in ["3.11", "3.12"]: - extras = "[bqstorage,ipywidgets,pandas,tqdm,opentelemetry]" - else: - extras = "[all]" - session.install("-e", f".{extras}", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. 
- session.run( - "py.test", - "--quiet", - os.path.join("tests", "system"), - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def mypy_samples(session): - """Run type checks with mypy.""" - session.install("pytest") - for requirements_path in CURRENT_DIRECTORY.glob("samples/*/requirements.txt"): - session.install("-r", str(requirements_path)) - session.install(MYPY_VERSION) - - # requirements.txt might include this package. Install from source so that - # we can author samples with unreleased features. - session.install("-e", ".[all]") - - # Just install the dependencies' type info directly, since "mypy --install-types" - # might require an additional pass. - session.install( - "types-mock", - "types-pytz", - "types-protobuf!=4.24.0.20240106", # This version causes an error: 'Module "google.oauth2" has no attribute "service_account"' - "types-python-dateutil", - "types-requests", - "types-setuptools", - ) - - session.install("typing-extensions") # for TypedDict in pre-3.8 Python versions - - session.run( - "mypy", - "--config-file", - str(CURRENT_DIRECTORY / "samples" / "mypy.ini"), - "--no-incremental", # Required by warn-unused-configs from mypy.ini to work - "samples/", - ) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def snippets(session): - """Run the snippets test suite.""" - - # Check the value of `RUN_SNIPPETS_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SNIPPETS_TESTS", "true") == "false": - session.skip("RUN_SNIPPETS_TESTS is set to false, skipping") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - - # Install all test dependencies, then install local packages in place. - session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - session.install("google-cloud-storage", "-c", constraints_path) - session.install("grpcio", "-c", constraints_path) - - if session.python in ["3.11", "3.12"]: - extras = "[bqstorage,ipywidgets,pandas,tqdm,opentelemetry]" - else: - extras = "[all]" - session.install("-e", f".{extras}", "-c", constraints_path) - - # Run py.test against the snippets tests. - # Skip tests in samples/snippets, as those are run in a different session - # using the nox config from that directory. - session.run("py.test", os.path.join("docs", "snippets.py"), *session.posargs) - session.run( - "py.test", - "samples", - "--ignore=samples/desktopapp", - "--ignore=samples/magics", - "--ignore=samples/geography", - "--ignore=samples/notebooks", - "--ignore=samples/snippets", - *session.posargs, - ) + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -284,140 +277,15 @@ def cover(session): """ session.install("coverage", "pytest-cov") session.run("coverage", "report", "--show-missing", "--fail-under=100") - session.run("coverage", "erase") - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): - """Run all tests with prerelease versions of dependencies installed. - - https://github.com/googleapis/python-bigquery/issues/95 - """ - # PyArrow prerelease packages are published to an alternative PyPI host. 
- # https://arrow.apache.org/docs/python/install.html#installing-nightly-packages - session.install( - "--extra-index-url", - "https://pypi.fury.io/arrow-nightlies/", - "--prefer-binary", - "--pre", - "--upgrade", - "pyarrow", - ) - session.install( - "--extra-index-url", - "https://pypi.anaconda.org/scipy-wheels-nightly/simple", - "--prefer-binary", - "--pre", - "--upgrade", - "pandas", - ) - session.install( - "--pre", - "--upgrade", - "IPython", - "ipykernel", - "ipywidgets", - "tqdm", - "git+https://github.com/pypa/packaging.git", - ) - - session.install( - "--pre", - "--upgrade", - "google-api-core", - "google-cloud-bigquery-storage", - "google-cloud-core", - "google-resumable-media", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", - ) - session.install( - "freezegun", - "google-cloud-datacatalog", - "google-cloud-storage", - "google-cloud-testutils", - "mock", - "psutil", - "pytest", - "pytest-cov", - ) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # We use --no-deps to ensure that pre-release versions aren't overwritten - # by the version ranges in setup.py. - session.install(*deps) - session.install("--no-deps", "-e", ".[all]") - - # Print out prerelease package versions. - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run("python", "-c", "import pandas; print(pandas.__version__)") - session.run("python", "-c", "import pyarrow; print(pyarrow.__version__)") - session.run("python", "-m", "pip", "freeze") - - # Run all tests, except a few samples tests which require extra dependencies. - session.run("py.test", "tests/unit") - session.run("py.test", "tests/system") - session.run("py.test", "samples/tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - - session.install("flake8", BLACK_VERSION) - session.install("-e", ".") - session.run("flake8", os.path.join("google", "cloud", "bigquery")) - session.run("flake8", "tests") - session.run("flake8", os.path.join("docs", "samples")) - session.run("flake8", os.path.join("docs", "snippets.py")) - session.run("flake8", "benchmark") - session.run("black", "--check", *BLACK_PATHS) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - - session.install("docutils", "Pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. - Format code to uniform standard. 
-    """
-
-    session.install(BLACK_VERSION)
-    session.run("black", *BLACK_PATHS)
+    session.run("coverage", "erase")
 
 
 @nox.session(python="3.9")
 def docs(session):
-    """Build the docs."""
+    """Build the docs for this library."""
+
+    session.install("-e", ".")
     session.install(
         # We need to pin to specific versions of the `sphinxcontrib-*` packages
         # which still support sphinx 4.x.
@@ -432,8 +300,6 @@ def docs(session):
         "alabaster",
         "recommonmark",
     )
-    session.install("google-cloud-storage")
-    session.install("-e", ".[all]")
 
     shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
     session.run(
@@ -494,3 +360,93 @@ def docfx(session):
         os.path.join("docs", ""),
         os.path.join("docs", "_build", "html", ""),
     )
+
+
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
+def prerelease_deps(session):
+    """Run all tests with prerelease versions of dependencies installed."""
+
+    # Install all dependencies
+    session.install("-e", ".[all, tests, tracing]")
+    unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
+    session.install(*unit_deps_all)
+    system_deps_all = (
+        SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES
+    )
+    session.install(*system_deps_all)
+
+    # Because we test minimum dependency versions on the minimum Python
+    # version, the first version we test with in the unit tests sessions has a
+    # constraints file containing all dependencies and extras.
+    with open(
+        CURRENT_DIRECTORY
+        / "testing"
+        / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt",
+        encoding="utf-8",
+    ) as constraints_file:
+        constraints_text = constraints_file.read()
+
+    # Ignore leading whitespace and comment lines.
+    constraints_deps = [
+        match.group(1)
+        for match in re.finditer(
+            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+        )
+    ]
+
+    session.install(*constraints_deps)
+
+    prerel_deps = [
+        "protobuf",
+        # dependency of grpc
+        "six",
+        "googleapis-common-protos",
+        # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "google-auth",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+    session.run("py.test", "tests/unit")
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
+    if os.path.exists(system_test_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if os.path.exists(system_test_folder_path):
+        session.run(
+            "py.test",
+            "--verbose",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
diff --git a/setup.py b/setup.py
index 9fbc91e..207976d 100644
--- a/setup.py
+++ b/setup.py
@@ -20,70 +20,24 @@
 # Package metadata.
 
-name = "google-cloud-bigquery" -description = "Google BigQuery API client library" +name = "bigquery-magics" +description = "Google BigQuery magics for Jupyter and IPython" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 5 - Production/Stable" +# 'Development Status :: 5 - Production/Stable'`` +release_status = "Development Status :: 4 - Beta" dependencies = [ - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", - # NOTE: Maintainers, please do not require google-cloud-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-cloud-core >= 1.6.0, <3.0.0dev", - "google-resumable-media >= 0.6.0, < 3.0dev", - "packaging >= 20.0.0", - "python-dateutil >= 2.7.2, <3.0dev", - "requests >= 2.21.0, < 3.0.0dev", + "db-dtypes>=0.3.0,<2.0.0dev", + "google-cloud-bigquery >= 3.0.0, <4.0.0dev", + "ipywidgets>=7.7.1", + "ipython>=7.23.1", + "ipykernel>=6.0.0", + "pandas>=1.1.0", + "pyarrow >= 3.0.0", ] -pyarrow_dependency = "pyarrow >= 3.0.0" -extras = { - # Keep the no-op bqstorage extra for backward compatibility. - # See: https://github.com/googleapis/python-bigquery/issues/757 - "bqstorage": [ - "google-cloud-bigquery-storage >= 2.6.0, <3.0.0dev", - # Due to an issue in pip's dependency resolver, the `grpc` extra is not - # installed, even though `google-cloud-bigquery-storage` specifies it - # as `google-api-core[grpc]`. We thus need to explicitly specify it here. - # See: https://github.com/googleapis/python-bigquery/issues/83 The - # grpc.Channel.close() method isn't added until 1.32.0. - # https://github.com/grpc/grpc/pull/15254 - "grpcio >= 1.47.0, < 2.0dev", - "grpcio >= 1.49.1, < 2.0dev; python_version>='3.11'", - pyarrow_dependency, - ], - "pandas": [ - "pandas>=1.1.0", - pyarrow_dependency, - "db-dtypes>=0.3.0,<2.0.0dev", - "importlib_metadata>=1.0.0; python_version<'3.8'", - ], - "ipywidgets": [ - "ipywidgets>=7.7.0", - "ipykernel>=6.0.0", - ], - "geopandas": ["geopandas>=0.9.0, <1.0dev", "Shapely>=1.8.4, <3.0.0dev"], - "ipython": [ - "ipython>=7.23.1,!=8.1.0", - "ipykernel>=6.0.0", - ], - "tqdm": ["tqdm >= 4.7.4, <5.0.0dev"], - "opentelemetry": [ - "opentelemetry-api >= 1.1.0", - "opentelemetry-sdk >= 1.1.0", - "opentelemetry-instrumentation >= 0.20b0", - ], - "bigquery_v2": [ - "proto-plus >= 1.15.0, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", # For the legacy proto-based types. - ], -} +extras = {} all_extras = [] @@ -101,16 +55,16 @@ readme = readme_file.read() version = {} -with open(os.path.join(package_root, "google/cloud/bigquery/version.py")) as fp: +with open(os.path.join(package_root, "bigquery_magics/version.py")) as fp: exec(fp.read(), version) version = version["__version__"] -# Only include packages under the 'google' namespace. Do not include tests, +# Only include packages under the 'bigquery_magics' namespace. Do not include tests, # benchmarks, etc. 
packages = [ package for package in setuptools.find_namespace_packages() - if package.startswith("google") + if package.startswith("bigquery_magics") ] setuptools.setup( @@ -121,7 +75,7 @@ author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - url="https://github.com/googleapis/python-bigquery", + url="https://github.com/googleapis/python-bigquery-magics", classifiers=[ release_status, "Intended Audience :: Developers", diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index c5e37fc..0f21d63 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -1 +1,2 @@ -grpcio==1.47.0 +# IMPORTANT: When Python 3.9 support is dropped, update these to +# match the minimums in setup.py. diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index e69de29..84faed2 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -0,0 +1,2 @@ +# IMPORTANT: When Python 3.10 support is dropped, update these to +# match the minimums in setup.py. diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt index e69de29..726eb92 100644 --- a/testing/constraints-3.12.txt +++ b/testing/constraints-3.12.txt @@ -0,0 +1,2 @@ +# IMPORTANT: When Python 3.11 support is dropped, update these to +# match the minimums in setup.py. diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 2ea482e..bd915d1 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -6,25 +6,9 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 db-dtypes==0.3.0 -geopandas==0.9.0 -google-api-core==1.31.5 -google-cloud-bigquery-storage==2.6.0 -google-cloud-core==1.6.0 -google-resumable-media==0.6.0 -grpcio==1.47.0 +google-cloud-bigquery==3.0.0 ipywidgets==7.7.1 ipython==7.23.1 ipykernel==6.0.0 -opentelemetry-api==1.1.0 -opentelemetry-instrumentation==0.20b0 -opentelemetry-sdk==1.1.0 -packaging==20.0.0 pandas==1.1.0 -proto-plus==1.22.0 -protobuf==3.19.5 pyarrow==3.0.0 -python-dateutil==2.7.3 -requests==2.21.0 -Shapely==1.8.4 -six==1.13.0 -tqdm==4.7.4 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index e5e73c5..a47a67b 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -1,2 +1,8 @@ -grpcio==1.47.0 -pandas==1.2.0 +# IMPORTANT: When Python 3.7 support is dropped, update these to +# match the minimums in setup.py. +# +# We try to test across major versions of our dependencies. +# This is the last ipython 7.x release +ipython==7.34.0 +# This is the last pandas 1.5.x release. +pandas==1.5.3 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index d4c3028..66cf6ee 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -1,8 +1,7 @@ -# This constraints file is used to make sure that the latest dependency versions -# we claim to support in setup.py are indeed installed in test sessions in the most -# recent Python version supported (3.9 at the time of writing - 2021-05-05). +# IMPORTANT: When Python 3.8 support is dropped, update these to +# match the minimums in setup.py. # -# NOTE: Not comprehensive yet, will eventually be maintained semi-automatically by -# the renovate bot. -grpcio==1.47.0 -pyarrow>=4.0.0 +# We try to test across major versions of our dependencies. +# This is the last pandas 2.0.x release. 
+pandas==2.0.3
+
diff --git a/tests/system/test_bigquery.py b/tests/system/test_bigquery.py
new file mode 100644
index 0000000..3d761cd
--- /dev/null
+++ b/tests/system/test_bigquery.py
@@ -0,0 +1,82 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""System tests for Jupyter/IPython connector."""
+
+import re
+
+import pytest
+import psutil
+
+
+IPython = pytest.importorskip("IPython")
+io = pytest.importorskip("IPython.utils.io")
+pandas = pytest.importorskip("pandas")
+tools = pytest.importorskip("IPython.testing.tools")
+interactiveshell = pytest.importorskip("IPython.terminal.interactiveshell")
+
+
+@pytest.fixture(scope="session")
+def ipython():
+    config = tools.default_config()
+    config.TerminalInteractiveShell.simple_prompt = True
+    shell = interactiveshell.TerminalInteractiveShell.instance(config=config)
+    return shell
+
+
+@pytest.fixture()
+def ipython_interactive(ipython):
+    """Activate IPython's builtin hooks
+
+    for the duration of the test scope.
+    """
+    with ipython.builtin_trap:
+        yield ipython
+
+
+def test_bigquery_magic(ipython_interactive):
+    ip = IPython.get_ipython()
+    current_process = psutil.Process()
+    conn_count_start = len(current_process.connections())
+
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    sql = """
+        SELECT
+            CONCAT(
+                'https://stackoverflow.com/questions/',
+                CAST(id as STRING)) as url,
+            view_count
+        FROM `bigquery-public-data.stackoverflow.posts_questions`
+        WHERE tags like '%google-bigquery%'
+        ORDER BY view_count DESC
+        LIMIT 10
+    """
+    with io.capture_output() as captured:
+        result = ip.run_cell_magic("bigquery", "--use_rest_api", sql)
+
+    conn_count_end = len(current_process.connections())
+
+    lines = re.split("\n|\r", captured.stdout)
+    # Removes blanks & terminal code (result of display clearing)
+    updates = list(filter(lambda x: bool(x) and x != "\x1b[2K", lines))
+    assert re.match("Executing query with job ID: .*", updates[0])
+    assert all(re.match("Query executing: .*s", line) for line in updates[1:-1])
+    assert isinstance(result, pandas.DataFrame)
+    assert len(result) == 10  # verify row count
+    assert list(result) == ["url", "view_count"]  # verify column names
+
+    # NOTE: For some reason, the number of open sockets is sometimes one *less*
+    # than expected when running system tests on Kokoro, thus using the <= assertion.
+    # That's still fine, however, since the sockets are apparently not leaked.
+    assert conn_count_end <= conn_count_start  # system resources are released
diff --git a/tests/unit/line_arg_parser/__init__.py b/tests/unit/line_arg_parser/__init__.py
new file mode 100644
index 0000000..c633424
--- /dev/null
+++ b/tests/unit/line_arg_parser/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/unit/line_arg_parser/test_lexer.py b/tests/unit/line_arg_parser/test_lexer.py
new file mode 100644
index 0000000..3624ed0
--- /dev/null
+++ b/tests/unit/line_arg_parser/test_lexer.py
@@ -0,0 +1,34 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+IPython = pytest.importorskip("IPython")
+
+
+@pytest.fixture(scope="session")
+def lexer_class():
+    from google.cloud.bigquery.magics.line_arg_parser.lexer import Lexer
+
+    return Lexer
+
+
+def test_empty_input(lexer_class):
+    from google.cloud.bigquery.magics.line_arg_parser import TokenType
+    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+
+    lexer = lexer_class("")
+    tokens = list(lexer)
+
+    assert tokens == [Token(TokenType.EOL, lexeme="", pos=0)]
diff --git a/tests/unit/line_arg_parser/test_parser.py b/tests/unit/line_arg_parser/test_parser.py
new file mode 100644
index 0000000..b170d53
--- /dev/null
+++ b/tests/unit/line_arg_parser/test_parser.py
@@ -0,0 +1,206 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+IPython = pytest.importorskip("IPython")
+
+
+@pytest.fixture(scope="session")
+def parser_class():
+    from google.cloud.bigquery.magics.line_arg_parser.parser import Parser
+
+    return Parser
+
+
+def test_consume_expected_eol(parser_class):
+    from google.cloud.bigquery.magics.line_arg_parser import TokenType
+    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+
+    # A simple iterable of Tokens is sufficient.
+    fake_lexer = [Token(TokenType.EOL, lexeme="", pos=0)]
+    parser = parser_class(fake_lexer)
+
+    parser.consume(TokenType.EOL)  # no error
+
+
+def test_consume_unexpected_eol(parser_class):
+    from google.cloud.bigquery.magics.line_arg_parser import ParseError
+    from google.cloud.bigquery.magics.line_arg_parser import TokenType
+    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+
+    # A simple iterable of Tokens is sufficient.
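+    # (The parser consumes its lexer purely by iteration, so a plain list of
+    # tokens can stand in for a real Lexer instance.)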
+ fake_lexer = [Token(TokenType.EOL, lexeme="", pos=0)] + parser = parser_class(fake_lexer) + + with pytest.raises(ParseError, match=r"Unexpected end of input.*expected.*COLON.*"): + parser.consume(TokenType.COLON) + + +def test_input_line_unexpected_input(parser_class): + from google.cloud.bigquery.magics.line_arg_parser import ParseError + from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + + # A simple iterable of Tokens is sufficient. + fake_lexer = [ + Token(TokenType.DEST_VAR, lexeme="results", pos=0), + Token(TokenType.UNKNOWN, lexeme="boo!", pos=8), + Token(TokenType.EOL, lexeme="", pos=12), + ] + parser = parser_class(fake_lexer) + + with pytest.raises(ParseError, match=r"Unexpected input.*position 8.*boo!.*"): + parser.input_line() + + +def test_destination_var_unexpected_input(parser_class): + from google.cloud.bigquery.magics.line_arg_parser import ParseError + from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + + # A simple iterable of Tokens is sufficient. + fake_lexer = [ + Token(TokenType.UNKNOWN, lexeme="@!#", pos=2), + Token(TokenType.EOL, lexeme="", pos=5), + ] + parser = parser_class(fake_lexer) + + with pytest.raises(ParseError, match=r"Unknown.*position 2.*@!#.*"): + parser.destination_var() + + +def test_option_value_unexpected_input(parser_class): + from google.cloud.bigquery.magics.line_arg_parser import ParseError + from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + + # A simple iterable of Tokens is sufficient. + fake_lexer = [ + Token(TokenType.UNKNOWN, lexeme="@!#", pos=8), + Token(TokenType.OPTION_SPEC, lexeme="--foo", pos=13), + ] + parser = parser_class(fake_lexer) + + with pytest.raises(ParseError, match=r"Unknown input.*position 8.*@!#.*"): + parser.option_value() + + +def test_dict_items_empty_dict(parser_class): + from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + + # A simple iterable of Tokens is sufficient. + fake_lexer = [Token(TokenType.RCURL, lexeme="}", pos=22)] + parser = parser_class(fake_lexer) + + result = parser.dict_items() + + assert result == [] + + +def test_dict_items_trailing_comma(parser_class): + from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + + # A simple iterable of Tokens is sufficient. + fake_lexer = [ + Token(TokenType.PY_STRING, lexeme="'age'", pos=10), + Token(TokenType.COLON, lexeme=":", pos=17), + Token(TokenType.PY_NUMBER, lexeme="18", pos=19), + Token(TokenType.COMMA, lexeme=",", pos=21), + Token(TokenType.RCURL, lexeme="}", pos=22), + ] + parser = parser_class(fake_lexer) + + result = parser.dict_items() + + assert len(result) == 1 + dict_item = result[0] + assert dict_item.key.key_value == "'age'" + assert dict_item.value.raw_value == "18" + + +def test_dict_item_unknown_input(parser_class): + from google.cloud.bigquery.magics.line_arg_parser import ParseError + from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + + # A simple iterable of Tokens is sufficient. 
+ fake_lexer = [Token(TokenType.UNKNOWN, lexeme="#/%", pos=35)] + parser = parser_class(fake_lexer) + + with pytest.raises(ParseError, match=r"Unknown.*position 35.*#/%.*"): + parser.dict_item() + + +def test_pyvalue_list_containing_dict(parser_class): + from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from google.cloud.bigquery.magics.line_arg_parser.parser import PyDict + from google.cloud.bigquery.magics.line_arg_parser.parser import PyList + + # A simple iterable of Tokens is sufficient. + fake_lexer = [ + Token(TokenType.LSQUARE, lexeme="[", pos=21), + Token(TokenType.LCURL, lexeme="{", pos=22), + Token(TokenType.PY_STRING, lexeme="'age'", pos=23), + Token(TokenType.COLON, lexeme=":", pos=28), + Token(TokenType.PY_NUMBER, lexeme="18", pos=30), + Token(TokenType.RCURL, lexeme="}", pos=32), + Token(TokenType.COMMA, lexeme=",", pos=33), # trailing comma + Token(TokenType.RSQUARE, lexeme="]", pos=34), + Token(TokenType.EOL, lexeme="", pos=40), + ] + parser = parser_class(fake_lexer) + + result = parser.py_value() + + assert isinstance(result, PyList) + assert len(result.items) == 1 + + element = result.items[0] + assert isinstance(element, PyDict) + assert len(element.items) == 1 + + dict_item = element.items[0] + assert dict_item.key.key_value == "'age'" + assert dict_item.value.raw_value == "18" + + +def test_pyvalue_invalid_token(parser_class): + from google.cloud.bigquery.magics.line_arg_parser import ParseError + from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + + # A simple iterable of Tokens is sufficient. + fake_lexer = [Token(TokenType.OPTION_SPEC, lexeme="--verbose", pos=75)] + parser = parser_class(fake_lexer) + + error_pattern = r"Unexpected token.*OPTION_SPEC.*position 75.*" + with pytest.raises(ParseError, match=error_pattern): + parser.py_value() + + +def test_collection_items_empty(parser_class): + from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + + # A simple iterable of Tokens is sufficient. + fake_lexer = [Token(TokenType.RPAREN, lexeme=")", pos=30)] + parser = parser_class(fake_lexer) + + result = parser.collection_items() + + assert result == [] diff --git a/tests/unit/line_arg_parser/test_visitors.py b/tests/unit/line_arg_parser/test_visitors.py new file mode 100644 index 0000000..288ef5f --- /dev/null +++ b/tests/unit/line_arg_parser/test_visitors.py @@ -0,0 +1,36 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import pytest
+
+IPython = pytest.importorskip("IPython")
+
+
+@pytest.fixture
+def base_visitor():
+    from google.cloud.bigquery.magics.line_arg_parser.visitors import NodeVisitor
+
+    return NodeVisitor()
+
+
+def test_unknown_node(base_visitor):
+    from google.cloud.bigquery.magics.line_arg_parser.parser import ParseNode
+
+    class UnknownNode(ParseNode):
+        pass
+
+    node = UnknownNode()
+
+    with pytest.raises(Exception, match=r"No visit_UnknownNode method"):
+        base_visitor.visit(node)
diff --git a/tests/unit/test_bigquery.py b/tests/unit/test_bigquery.py
new file mode 100644
index 0000000..4b1aaf1
--- /dev/null
+++ b/tests/unit/test_bigquery.py
@@ -0,0 +1,2073 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import re
+from concurrent import futures
+from unittest import mock
+import warnings
+
+from google.api_core import exceptions
+import google.auth.credentials
+import pytest
+from tests.unit.helpers import make_connection
+from test_utils.imports import maybe_fail_import
+
+from google.cloud import bigquery
+from google.cloud.bigquery import exceptions as bq_exceptions
+from google.cloud.bigquery import job
+from google.cloud.bigquery import table
+from google.cloud.bigquery.retry import DEFAULT_TIMEOUT
+
+
+try:
+    from google.cloud.bigquery.magics import magics
+except ImportError:
+    magics = None
+
+bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
+IPython = pytest.importorskip("IPython")
+interactiveshell = pytest.importorskip("IPython.terminal.interactiveshell")
+tools = pytest.importorskip("IPython.testing.tools")
+io = pytest.importorskip("IPython.utils.io")
+pandas = pytest.importorskip("pandas")
+
+
+@pytest.fixture(scope="session")
+def ipython():
+    config = tools.default_config()
+    config.TerminalInteractiveShell.simple_prompt = True
+    shell = interactiveshell.TerminalInteractiveShell.instance(config=config)
+    return shell
+
+
+@pytest.fixture()
+def ipython_interactive(request, ipython):
+    """Activate IPython's builtin hooks for the duration of the test scope."""
+    with ipython.builtin_trap:
+        yield ipython
+
+
+@pytest.fixture()
+def ipython_ns_cleanup():
+    """A helper to clean up the user namespace after the test.
+
+    Tests append (shell, name) pairs; those names are deleted on teardown.
+    """
+    names_to_clean = []  # pairs (IPython_instance, name_to_clean)
+
+    yield names_to_clean
+
+    for ip, name in names_to_clean:
+        if name in ip.user_ns:
+            del ip.user_ns[name]
+
+
+@pytest.fixture(scope="session")
+def missing_bq_storage():
+    """Provide a patcher that can make the bigquery storage import fail."""
+
+    def fail_if(name, globals, locals, fromlist, level):
+        # NOTE: *very* simplified, assuming a straightforward absolute import
+        return "bigquery_storage" in name or (
+            fromlist is not None and "bigquery_storage" in fromlist
+        )
+
+    return maybe_fail_import(predicate=fail_if)
+
+
+@pytest.fixture(scope="session")
+def missing_grpcio_lib():
+    """Provide a patcher that can make the gapic library import fail."""
+
+    def fail_if(name, globals, locals, fromlist, level):
+        # NOTE: *very* simplified, assuming a straightforward absolute import
+        return "gapic_v1" in name or (fromlist is not None and "gapic_v1" in fromlist)
+
+    return maybe_fail_import(predicate=fail_if)
+
+
+PROJECT_ID = "its-a-project-eh"
+JOB_ID = "some-random-id"
+JOB_REFERENCE_RESOURCE = {"projectId": PROJECT_ID, "jobId": JOB_ID}
+DATASET_ID = "dest_dataset"
+TABLE_ID = "dest_table"
+TABLE_REFERENCE_RESOURCE = {
+    "projectId": PROJECT_ID,
+    "datasetId": DATASET_ID,
+    "tableId": TABLE_ID,
+}
+QUERY_STRING = "SELECT 42 AS the_answer FROM `life.the_universe.and_everything`;"
+QUERY_RESOURCE = {
+    "jobReference": JOB_REFERENCE_RESOURCE,
+    "configuration": {
+        "query": {
+            "destinationTable": TABLE_REFERENCE_RESOURCE,
+            "query": QUERY_STRING,
+            "queryParameters": [],
+            "useLegacySql": False,
+        }
+    },
+    "status": {"state": "DONE"},
+}
+QUERY_RESULTS_RESOURCE = {
+    "jobReference": JOB_REFERENCE_RESOURCE,
+    "totalRows": 1,
+    "jobComplete": True,
+    "schema": {"fields": [{"name": "the_answer", "type": "INTEGER"}]},
+}
+
+
+def test_context_with_default_credentials():
+    """When Application Default Credentials are set, the context credentials
+    are created the first time they are accessed.
+    """
+    assert magics.context._credentials is None
+    assert magics.context._project is None
+
+    project = "prahj-ekt"
+    credentials_mock = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+    default_patch = mock.patch(
+        "google.auth.default", return_value=(credentials_mock, project)
+    )
+    with default_patch as default_mock:
+        assert magics.context.credentials is credentials_mock
+        assert magics.context.project == project
+
+    assert default_mock.call_count == 2
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
+def test_context_with_default_connection():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context._credentials = None
+    magics.context._project = None
+    magics.context._connection = None
+
+    default_credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+    credentials_patch = mock.patch(
+        "google.auth.default", return_value=(default_credentials, "project-from-env")
+    )
+    default_conn = make_connection(QUERY_RESOURCE, QUERY_RESULTS_RESOURCE)
+    conn_patch = mock.patch("google.cloud.bigquery.client.Connection", autospec=True)
+    list_rows_patch = mock.patch(
+        "google.cloud.bigquery.client.Client._list_rows_from_query_results",
+        return_value=google.cloud.bigquery.table._EmptyRowIterator(),
+    )
+
+    with conn_patch as conn, credentials_patch, list_rows_patch as list_rows:
+        conn.return_value = default_conn
+        ip.run_cell_magic("bigquery", "", 
QUERY_STRING) + + # Check that query actually starts the job. + conn.assert_called() + list_rows.assert_called() + begin_call = mock.call( + method="POST", + path="/projects/project-from-env/jobs", + data=mock.ANY, + timeout=DEFAULT_TIMEOUT, + ) + query_results_call = mock.call( + method="GET", + path=f"/projects/{PROJECT_ID}/queries/{JOB_ID}", + query_params=mock.ANY, + timeout=mock.ANY, + ) + default_conn.api_request.assert_has_calls([begin_call, query_results_call]) + + +def test_context_credentials_and_project_can_be_set_explicitly(): + project1 = "one-project-55564" + project2 = "other-project-52569" + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, project1) + ) + with default_patch as default_mock: + magics.context.credentials = credentials_mock + magics.context.project = project2 + + assert magics.context.project == project2 + assert magics.context.credentials is credentials_mock + # default should not be called if credentials & project are explicitly set + assert default_mock.call_count == 0 + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_context_with_custom_connection(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + magics.context._credentials = None + context_conn = magics.context._connection = make_connection( + QUERY_RESOURCE, QUERY_RESULTS_RESOURCE + ) + + default_credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + credentials_patch = mock.patch( + "google.auth.default", return_value=(default_credentials, "project-from-env") + ) + default_conn = make_connection() + conn_patch = mock.patch("google.cloud.bigquery.client.Connection", autospec=True) + list_rows_patch = mock.patch( + "google.cloud.bigquery.client.Client._list_rows_from_query_results", + return_value=google.cloud.bigquery.table._EmptyRowIterator(), + ) + + with conn_patch as conn, credentials_patch, list_rows_patch as list_rows: + conn.return_value = default_conn + ip.run_cell_magic("bigquery", "", QUERY_STRING) + + list_rows.assert_called() + default_conn.api_request.assert_not_called() + begin_call = mock.call( + method="POST", + path="/projects/project-from-env/jobs", + data=mock.ANY, + timeout=DEFAULT_TIMEOUT, + ) + query_results_call = mock.call( + method="GET", + path=f"/projects/{PROJECT_ID}/queries/{JOB_ID}", + query_params=mock.ANY, + timeout=mock.ANY, + ) + context_conn.api_request.assert_has_calls([begin_call, query_results_call]) + + +def test__run_query(): + magics.context._credentials = None + + job_id = "job_1234" + sql = "SELECT 17" + responses = [ + futures.TimeoutError, + futures.TimeoutError, + [table.Row((17,), {"num": 0})], + ] + + client_patch = mock.patch( + "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True + ) + with client_patch as client_mock, io.capture_output() as captured: + client_mock().query(sql).result.side_effect = responses + client_mock().query(sql).job_id = job_id + + query_job = magics._run_query(client_mock(), sql) + + lines = re.split("\n|\r", captured.stdout) + # Removes blanks & terminal code (result of display clearing) + updates = list(filter(lambda x: bool(x) and x != "\x1b[2K", lines)) + + assert query_job.job_id == job_id + expected_first_line = "Executing query with job ID: {}".format(job_id) + assert updates[0] == 
expected_first_line + execution_updates = updates[1:-1] + assert len(execution_updates) == 3 # one update per API response + for line in execution_updates: + assert re.match("Query executing: .*s", line) + + +def test__run_query_dry_run_without_errors_is_silent(): + magics.context._credentials = None + + sql = "SELECT 17" + + client_patch = mock.patch( + "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True + ) + + job_config = job.QueryJobConfig() + job_config.dry_run = True + with client_patch as client_mock, io.capture_output() as captured: + client_mock().query(sql).job_id = None + magics._run_query(client_mock(), sql, job_config=job_config) + + assert len(captured.stderr) == 0 + assert len(captured.stdout) == 0 + + +def test__make_bqstorage_client_false(): + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + test_client = bigquery.Client( + project="test_project", credentials=credentials_mock, location="test_location" + ) + got = magics._make_bqstorage_client(test_client, False, {}) + assert got is None + + +@pytest.mark.skipif( + bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`" +) +def test__make_bqstorage_client_true(): + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + test_client = bigquery.Client( + project="test_project", credentials=credentials_mock, location="test_location" + ) + got = magics._make_bqstorage_client(test_client, True, {}) + assert isinstance(got, bigquery_storage.BigQueryReadClient) + + +def test__make_bqstorage_client_true_raises_import_error(missing_bq_storage): + """When package `google-cloud-bigquery-storage` is not installed, reports + ImportError. + """ + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + test_client = bigquery.Client( + project="test_project", credentials=credentials_mock, location="test_location" + ) + + with pytest.raises(ImportError) as exc_context, missing_bq_storage: + magics._make_bqstorage_client(test_client, True, {}) + + error_msg = str(exc_context.value) + assert "google-cloud-bigquery-storage" in error_msg + assert "pyarrow" in error_msg + + +@pytest.mark.skipif( + bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`" +) +def test__make_bqstorage_client_true_obsolete_dependency(): + """When package `google-cloud-bigquery-storage` is installed but has outdated + version, returns None, and raises a warning. + """ + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + test_client = bigquery.Client( + project="test_project", credentials=credentials_mock, location="test_location" + ) + + patcher = mock.patch( + "google.cloud.bigquery._versions_helpers.BQ_STORAGE_VERSIONS.try_import", + side_effect=bq_exceptions.LegacyBigQueryStorageError( + "google-cloud-bigquery-storage is outdated" + ), + ) + with patcher, warnings.catch_warnings(record=True) as warned: + got = magics._make_bqstorage_client(test_client, True, {}) + + assert got is None + + matching_warnings = [ + warning + for warning in warned + if "google-cloud-bigquery-storage is outdated" in str(warning) + ] + assert matching_warnings, "Obsolete dependency warning not raised." 
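+
+
+# `missing_grpcio_lib` makes the gapic transport import fail, so the helper
+# below is expected to surface the problem as an ImportError that points the
+# user at the missing `grpcio` dependency.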
+@pytest.mark.skipif(
+    bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
+def test__make_bqstorage_client_true_missing_gapic(missing_grpcio_lib):
+    credentials_mock = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+    test_client = bigquery.Client(
+        project="test_project", credentials=credentials_mock, location="test_location"
+    )
+
+    with pytest.raises(ImportError) as exc_context, missing_grpcio_lib:
+        magics._make_bqstorage_client(test_client, True, {})
+
+    assert "grpcio" in str(exc_context.value)
+
+
+def test__create_dataset_if_necessary_exists():
+    project = "project_id"
+    dataset_id = "dataset_id"
+    dataset_reference = bigquery.dataset.DatasetReference(project, dataset_id)
+    dataset = bigquery.Dataset(dataset_reference)
+    client_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True
+    )
+    with client_patch as client_mock:
+        client = client_mock()
+        client.project = project
+        client.get_dataset.return_value = dataset
+        magics._create_dataset_if_necessary(client, dataset_id)
+        client.create_dataset.assert_not_called()
+
+
+def test__create_dataset_if_necessary_not_exist():
+    project = "project_id"
+    dataset_id = "dataset_id"
+    client_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True
+    )
+    with client_patch as client_mock:
+        client = client_mock()
+        client.location = "us"
+        client.project = project
+        client.get_dataset.side_effect = exceptions.NotFound("dataset not found")
+        magics._create_dataset_if_necessary(client, dataset_id)
+        client.create_dataset.assert_called_once()
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+def test_extension_load():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+
+    # verify that the magic is registered and has the correct source
+    magic = ip.magics_manager.magics["cell"].get("bigquery")
+    assert magic.__module__ == "google.cloud.bigquery.magics.magics"
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
+@pytest.mark.skipif(
+    bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
+def test_bigquery_magic_without_optional_arguments(monkeypatch):
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    mock_credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    # Set up the context with monkeypatch so that it's reset for subsequent
+    # tests.
+    monkeypatch.setattr(magics.context, "_credentials", mock_credentials)
+
+    # Mock out the BigQuery Storage API.
+ bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) + bqstorage_instance_mock = mock.create_autospec( + bigquery_storage.BigQueryReadClient, instance=True + ) + bqstorage_instance_mock._transport = mock.Mock() + bqstorage_mock.return_value = bqstorage_instance_mock + bqstorage_client_patch = mock.patch( + "google.cloud.bigquery_storage.BigQueryReadClient", bqstorage_mock + ) + + sql = "SELECT 17 AS num" + result = pandas.DataFrame([17], columns=["num"]) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + query_job_mock.to_dataframe.return_value = result + + with run_query_patch as run_query_mock, bqstorage_client_patch: + run_query_mock.return_value = query_job_mock + return_value = ip.run_cell_magic("bigquery", "", sql) + + assert bqstorage_mock.called # BQ storage client was used + assert isinstance(return_value, pandas.DataFrame) + assert len(return_value) == len(result) # verify row count + assert list(return_value) == list(result) # verify column names + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_default_connection_user_agent(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._connection = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + conn_patch = mock.patch("google.cloud.bigquery.client.Connection", autospec=True) + + with conn_patch as conn, run_query_patch, default_patch: + ip.run_cell_magic("bigquery", "", "SELECT 17 as num") + + client_info_arg = conn.call_args[1].get("client_info") + assert client_info_arg is not None + assert client_info_arg.user_agent == "ipython-" + IPython.__version__ + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_with_legacy_sql(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with run_query_patch as run_query_mock: + ip.run_cell_magic("bigquery", "--use_legacy_sql", "SELECT 17 AS num") + + job_config_used = run_query_mock.call_args_list[0][1]["job_config"] + assert job_config_used.use_legacy_sql is True + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_with_result_saved_to_variable(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + ipython_ns_cleanup.append((ip, "df")) + + sql = "SELECT 17 AS num" + result = pandas.DataFrame([17], columns=["num"]) + assert "df" not in ip.user_ns + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + query_job_mock.to_dataframe.return_value = result + with run_query_patch as run_query_mock: + 
run_query_mock.return_value = query_job_mock + + return_value = ip.run_cell_magic("bigquery", "df", sql) + + assert return_value is None + assert "df" in ip.user_ns # verify that variable exists + df = ip.user_ns["df"] + assert len(df) == len(result) # verify row count + assert list(df) == list(result) # verify column names + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_does_not_clear_display_in_verbose_mode(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + clear_patch = mock.patch( + "google.cloud.bigquery.magics.magics.display.clear_output", + autospec=True, + ) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with clear_patch as clear_mock, run_query_patch: + ip.run_cell_magic("bigquery", "--verbose", "SELECT 17 as num") + + assert clear_mock.call_count == 0 + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_clears_display_in_non_verbose_mode(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + clear_patch = mock.patch( + "google.cloud.bigquery.magics.magics.display.clear_output", + autospec=True, + ) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with clear_patch as clear_mock, run_query_patch: + ip.run_cell_magic("bigquery", "", "SELECT 17 as num") + + assert clear_mock.call_count == 1 + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif( + bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`" +) +def test_bigquery_magic_with_bqstorage_from_argument(monkeypatch): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + mock_credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + # Set up the context with monkeypatch so that it's reset for subsequent + # tests. + monkeypatch.setattr(magics.context, "_credentials", mock_credentials) + + # Mock out the BigQuery Storage API. + bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) + bqstorage_instance_mock = mock.create_autospec( + bigquery_storage.BigQueryReadClient, instance=True + ) + bqstorage_instance_mock._transport = mock.Mock() + bqstorage_mock.return_value = bqstorage_instance_mock + bqstorage_client_patch = mock.patch( + "google.cloud.bigquery_storage.BigQueryReadClient", bqstorage_mock + ) + + sql = "SELECT 17 AS num" + result = pandas.DataFrame([17], columns=["num"]) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + query_job_mock.to_dataframe.return_value = result + with run_query_patch as run_query_mock, ( + bqstorage_client_patch + ), warnings.catch_warnings(record=True) as warned: + run_query_mock.return_value = query_job_mock + + return_value = ip.run_cell_magic("bigquery", "--use_bqstorage_api", sql) + + # Deprecation warning should have been issued. 
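+        # Both "deprecated" and the flag name are matched case-insensitively,
+        # so the check tolerates wording changes in the warning text.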
+ def warning_match(warning): + message = str(warning).lower() + return "deprecated" in message and "use_bqstorage_api" in message + + expected_warnings = list(filter(warning_match, warned)) + assert len(expected_warnings) == 1 + + assert len(bqstorage_mock.call_args_list) == 1 + kwargs = bqstorage_mock.call_args_list[0][1] + assert kwargs.get("credentials") is mock_credentials + client_info = kwargs.get("client_info") + assert client_info is not None + assert client_info.user_agent == "ipython-" + IPython.__version__ + + query_job_mock.to_dataframe.assert_called_once_with( + bqstorage_client=bqstorage_instance_mock, + create_bqstorage_client=mock.ANY, + progress_bar_type="tqdm_notebook", + ) + + assert isinstance(return_value, pandas.DataFrame) + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif( + bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`" +) +def test_bigquery_magic_with_rest_client_requested(monkeypatch): + pandas = pytest.importorskip("pandas") + + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + mock_credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + # Set up the context with monkeypatch so that it's reset for subsequent + # tests. + monkeypatch.setattr(magics.context, "_credentials", mock_credentials) + + # Mock out the BigQuery Storage API. + bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) + bqstorage_client_patch = mock.patch( + "google.cloud.bigquery_storage.BigQueryReadClient", bqstorage_mock + ) + + sql = "SELECT 17 AS num" + result = pandas.DataFrame([17], columns=["num"]) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + query_job_mock.to_dataframe.return_value = result + with run_query_patch as run_query_mock, bqstorage_client_patch: + run_query_mock.return_value = query_job_mock + + return_value = ip.run_cell_magic("bigquery", "--use_rest_api", sql) + + bqstorage_mock.assert_not_called() + query_job_mock.to_dataframe.assert_called_once_with( + bqstorage_client=None, + create_bqstorage_client=False, + progress_bar_type="tqdm_notebook", + ) + + assert isinstance(return_value, pandas.DataFrame) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_w_max_results_invalid(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + client_query_patch = mock.patch( + "google.cloud.bigquery.client.Client.query", autospec=True + ) + + sql = "SELECT 17 AS num" + + with pytest.raises(ValueError), default_patch, client_query_patch: + ip.run_cell_magic("bigquery", "--max_results=abc", sql) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_w_max_results_valid_calls_queryjob_result(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + client_query_patch = 
mock.patch( + "google.cloud.bigquery.client.Client.query", autospec=True + ) + + sql = "SELECT 17 AS num" + + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + + with client_query_patch as client_query_mock, default_patch: + client_query_mock.return_value = query_job_mock + ip.run_cell_magic("bigquery", "--max_results=5", sql) + + query_job_mock.result.assert_called_with(max_results=5) + query_job_mock.result.return_value.to_dataframe.assert_called_once_with( + bqstorage_client=None, + create_bqstorage_client=False, + progress_bar_type=mock.ANY, + ) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_w_max_results_query_job_results_fails(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + client_query_patch = mock.patch( + "google.cloud.bigquery.client.Client.query", autospec=True + ) + close_transports_patch = mock.patch( + "google.cloud.bigquery.magics.magics._close_transports", + autospec=True, + ) + + sql = "SELECT 17 AS num" + + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + query_job_mock.result.side_effect = [[], OSError] + + with pytest.raises( + OSError + ), client_query_patch as client_query_mock, ( + default_patch + ), close_transports_patch as close_transports: + client_query_mock.return_value = query_job_mock + ip.run_cell_magic("bigquery", "--max_results=5", sql) + + assert close_transports.called + + +def test_bigquery_magic_w_table_id_invalid(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + + list_rows_patch = mock.patch( + "google.cloud.bigquery.magics.magics.bigquery.Client.list_rows", + autospec=True, + side_effect=exceptions.BadRequest("Not a valid table ID"), + ) + + table_id = "not-a-real-table" + + with list_rows_patch, default_patch, io.capture_output() as captured_io: + ip.run_cell_magic("bigquery", "df", table_id) + + output = captured_io.stderr + assert "Could not save output to variable" in output + assert "400 Not a valid table ID" in output + assert "Traceback (most recent call last)" not in output + + +def test_bigquery_magic_w_missing_query(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + + cell_body = " \n \n \t\t \n " + + with io.capture_output() as captured_io, default_patch: + ip.run_cell_magic("bigquery", "df", cell_body) + + output = captured_io.stderr + assert "Could not save output to variable" in output + assert "Query is missing" in output + assert "Traceback (most recent call last)" not in output + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def 
test_bigquery_magic_w_table_id_and_destination_var(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + ipython_ns_cleanup.append((ip, "df")) + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + + row_iterator_mock = mock.create_autospec( + google.cloud.bigquery.table.RowIterator, instance=True + ) + + client_patch = mock.patch( + "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True + ) + + table_id = "bigquery-public-data.samples.shakespeare" + result = pandas.DataFrame([17], columns=["num"]) + + with client_patch as client_mock, default_patch: + client_mock().list_rows.return_value = row_iterator_mock + row_iterator_mock.to_dataframe.return_value = result + + ip.run_cell_magic("bigquery", "df", table_id) + + assert "df" in ip.user_ns + df = ip.user_ns["df"] + + assert isinstance(df, pandas.DataFrame) + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif( + bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`" +) +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_w_table_id_and_bqstorage_client(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + + row_iterator_mock = mock.create_autospec( + google.cloud.bigquery.table.RowIterator, instance=True + ) + + client_patch = mock.patch( + "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True + ) + + bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) + bqstorage_instance_mock = mock.create_autospec( + bigquery_storage.BigQueryReadClient, instance=True + ) + bqstorage_instance_mock._transport = mock.Mock() + bqstorage_mock.return_value = bqstorage_instance_mock + bqstorage_client_patch = mock.patch( + "google.cloud.bigquery_storage.BigQueryReadClient", bqstorage_mock + ) + + table_id = "bigquery-public-data.samples.shakespeare" + + with default_patch, client_patch as client_mock, bqstorage_client_patch: + client_mock()._ensure_bqstorage_client.return_value = bqstorage_instance_mock + client_mock().list_rows.return_value = row_iterator_mock + + ip.run_cell_magic("bigquery", "--max_results=5", table_id) + row_iterator_mock.to_dataframe.assert_called_once_with( + bqstorage_client=bqstorage_instance_mock, + create_bqstorage_client=mock.ANY, + ) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_dryrun_option_sets_job_config(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + + sql = "SELECT 17 AS num" + + with run_query_patch as run_query_mock: + ip.run_cell_magic("bigquery", "--dry_run", sql) + + job_config_used = run_query_mock.call_args_list[0][1]["job_config"] + assert job_config_used.dry_run is True + + +@pytest.mark.usefixtures("ipython_interactive") +def 
test_bigquery_magic_dryrun_option_returns_query_job(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + + sql = "SELECT 17 AS num" + + with run_query_patch as run_query_mock, io.capture_output() as captured_io: + run_query_mock.return_value = query_job_mock + return_value = ip.run_cell_magic("bigquery", "--dry_run", sql) + + assert "Query validated. This query will process" in captured_io.stdout + assert isinstance(return_value, job.QueryJob) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_dryrun_option_variable_error_message(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + ipython_ns_cleanup.append((ip, "q_job")) + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", + autospec=True, + side_effect=exceptions.BadRequest("Syntax error in SQL query"), + ) + + sql = "SELECT SELECT 17 AS num" + + assert "q_job" not in ip.user_ns + + with run_query_patch, io.capture_output() as captured: + ip.run_cell_magic("bigquery", "q_job --dry_run", sql) + + full_text = captured.stderr + assert "Could not save output to variable 'q_job'." in full_text + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_dryrun_option_saves_query_job_to_variable(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + + ipython_ns_cleanup.append((ip, "q_job")) + + sql = "SELECT 17 AS num" + + assert "q_job" not in ip.user_ns + + with run_query_patch as run_query_mock: + run_query_mock.return_value = query_job_mock + return_value = ip.run_cell_magic("bigquery", "q_job --dry_run", sql) + + assert return_value is None + assert "q_job" in ip.user_ns + q_job = ip.user_ns["q_job"] + assert isinstance(q_job, job.QueryJob) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_saves_query_job_to_variable_on_error(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + ipython_ns_cleanup.append((ip, "result")) + + client_query_patch = mock.patch( + "google.cloud.bigquery.client.Client.query", autospec=True + ) + + query_job = mock.create_autospec(job.QueryJob, instance=True) + exception = Exception("Unexpected SELECT") + exception.query_job = query_job + query_job.result.side_effect = exception + + sql = "SELECT SELECT 17 AS num" + + assert "result" not in ip.user_ns + + with client_query_patch as client_query_mock: + client_query_mock.return_value = query_job + return_value = ip.run_cell_magic("bigquery", "result", sql) + + assert return_value is None + assert "result" in 
ip.user_ns + result = ip.user_ns["result"] + assert isinstance(result, job.QueryJob) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_w_maximum_bytes_billed_invalid(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + client_query_patch = mock.patch("google.cloud.bigquery.client.Client.query") + + sql = "SELECT 17 AS num" + + with pytest.raises(ValueError), default_patch, client_query_patch: + ip.run_cell_magic("bigquery", "--maximum_bytes_billed=abc", sql) + + +@pytest.mark.parametrize( + "param_value,expected", [("987654321", "987654321"), ("None", "0")] +) +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_w_maximum_bytes_billed_overrides_context(param_value, expected): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + # Set the default maximum bytes billed, so we know it's overridable by the param. + magics.context.default_query_job_config.maximum_bytes_billed = 1234567 + + project = "test-project" + job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE) + job_reference["projectId"] = project + query = "SELECT 17 AS num" + resource = copy.deepcopy(QUERY_RESOURCE) + resource["jobReference"] = job_reference + resource["configuration"]["query"]["query"] = query + query_results = {"jobReference": job_reference, "totalRows": 0, "jobComplete": True} + data = {"jobReference": job_reference, "totalRows": 0, "rows": []} + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + conn = magics.context._connection = make_connection(resource, query_results, data) + list_rows_patch = mock.patch( + "google.cloud.bigquery.client.Client._list_rows_from_query_results", + return_value=google.cloud.bigquery.table._EmptyRowIterator(), + ) + with list_rows_patch, default_patch: + ip.run_cell_magic( + "bigquery", "--maximum_bytes_billed={}".format(param_value), query + ) + + _, req = conn.api_request.call_args_list[0] + sent_config = req["data"]["configuration"]["query"] + assert sent_config["maximumBytesBilled"] == expected + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_w_maximum_bytes_billed_w_context_inplace(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + magics.context.default_query_job_config.maximum_bytes_billed = 1337 + + project = "test-project" + job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE) + job_reference["projectId"] = project + query = "SELECT 17 AS num" + resource = copy.deepcopy(QUERY_RESOURCE) + resource["jobReference"] = job_reference + resource["configuration"]["query"]["query"] = query + query_results = {"jobReference": job_reference, "totalRows": 0, "jobComplete": True} + data = {"jobReference": job_reference, "totalRows": 0, "rows": []} + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + 
"google.auth.default", return_value=(credentials_mock, "general-project") + ) + conn = magics.context._connection = make_connection(resource, query_results, data) + list_rows_patch = mock.patch( + "google.cloud.bigquery.client.Client._list_rows_from_query_results", + return_value=google.cloud.bigquery.table._EmptyRowIterator(), + ) + with list_rows_patch, default_patch: + ip.run_cell_magic("bigquery", "", query) + + _, req = conn.api_request.call_args_list[0] + sent_config = req["data"]["configuration"]["query"] + assert sent_config["maximumBytesBilled"] == "1337" + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_w_maximum_bytes_billed_w_context_setter(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + magics.context.default_query_job_config = job.QueryJobConfig( + maximum_bytes_billed=10203 + ) + + project = "test-project" + job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE) + job_reference["projectId"] = project + query = "SELECT 17 AS num" + resource = copy.deepcopy(QUERY_RESOURCE) + resource["jobReference"] = job_reference + resource["configuration"]["query"]["query"] = query + query_results = {"jobReference": job_reference, "totalRows": 0, "jobComplete": True} + data = {"jobReference": job_reference, "totalRows": 0, "rows": []} + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + conn = magics.context._connection = make_connection(resource, query_results, data) + list_rows_patch = mock.patch( + "google.cloud.bigquery.client.Client._list_rows_from_query_results", + return_value=google.cloud.bigquery.table._EmptyRowIterator(), + ) + with list_rows_patch, default_patch: + ip.run_cell_magic("bigquery", "", query) + + _, req = conn.api_request.call_args_list[0] + sent_config = req["data"]["configuration"]["query"] + assert sent_config["maximumBytesBilled"] == "10203" + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_with_no_query_cache(monkeypatch): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + conn = make_connection() + monkeypatch.setattr(magics.context, "_connection", conn) + monkeypatch.setattr(magics.context, "project", "project-from-context") + + # --no_query_cache option should override context. 
+ monkeypatch.setattr( + magics.context.default_query_job_config, "use_query_cache", True + ) + + ip.run_cell_magic("bigquery", "--no_query_cache", QUERY_STRING) + + conn.api_request.assert_called_with( + method="POST", + path="/projects/project-from-context/jobs", + data=mock.ANY, + timeout=DEFAULT_TIMEOUT, + ) + jobs_insert_call = [ + call + for call in conn.api_request.call_args_list + if call[1]["path"] == "/projects/project-from-context/jobs" + ][0] + assert not jobs_insert_call[1]["data"]["configuration"]["query"]["useQueryCache"] + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_context_with_no_query_cache_from_context(monkeypatch): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + conn = make_connection() + monkeypatch.setattr(magics.context, "_connection", conn) + monkeypatch.setattr(magics.context, "project", "project-from-context") + monkeypatch.setattr( + magics.context.default_query_job_config, "use_query_cache", False + ) + + ip.run_cell_magic("bigquery", "", QUERY_STRING) + + conn.api_request.assert_called_with( + method="POST", + path="/projects/project-from-context/jobs", + data=mock.ANY, + timeout=DEFAULT_TIMEOUT, + ) + jobs_insert_call = [ + call + for call in conn.api_request.call_args_list + if call[1]["path"] == "/projects/project-from-context/jobs" + ][0] + assert not jobs_insert_call[1]["data"]["configuration"]["query"]["useQueryCache"] + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_w_progress_bar_type_w_context_setter(monkeypatch): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + magics.context.progress_bar_type = "tqdm_gui" + + mock_credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + # Set up the context with monkeypatch so that it's reset for subsequent + # tests. + monkeypatch.setattr(magics.context, "_credentials", mock_credentials) + + # Mock out the BigQuery Storage API. 
+ bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) + bqstorage_client_patch = mock.patch( + "google.cloud.bigquery_storage.BigQueryReadClient", bqstorage_mock + ) + + sql = "SELECT 17 AS num" + result = pandas.DataFrame([17], columns=["num"]) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + query_job_mock.to_dataframe.return_value = result + with run_query_patch as run_query_mock, bqstorage_client_patch: + run_query_mock.return_value = query_job_mock + + return_value = ip.run_cell_magic("bigquery", "--use_rest_api", sql) + + bqstorage_mock.assert_not_called() + query_job_mock.to_dataframe.assert_called_once_with( + bqstorage_client=None, + create_bqstorage_client=False, + progress_bar_type=magics.context.progress_bar_type, + ) + + assert isinstance(return_value, pandas.DataFrame) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_with_progress_bar_type(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.progress_bar_type = None + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with run_query_patch as run_query_mock: + ip.run_cell_magic( + "bigquery", "--progress_bar_type=tqdm_gui", "SELECT 17 as num" + ) + + progress_bar_used = run_query_mock.mock_calls[1][2]["progress_bar_type"] + assert progress_bar_used == "tqdm_gui" + # context progress bar type should not change + assert magics.context.progress_bar_type is None + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_with_project(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with run_query_patch as run_query_mock, default_patch: + ip.run_cell_magic("bigquery", "--project=specific-project", "SELECT 17 as num") + + client_used = run_query_mock.call_args_list[0][0][0] + assert client_used.project == "specific-project" + # context project should not change + assert magics.context.project == "general-project" + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_with_bigquery_api_endpoint(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._connection = None + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with run_query_patch as run_query_mock: + ip.run_cell_magic( + "bigquery", + "--bigquery_api_endpoint=https://bigquery_api.endpoint.com", + "SELECT 17 as num", + ) + + connection_used = run_query_mock.call_args_list[0][0][0]._connection + assert connection_used.API_BASE_URL == "https://bigquery_api.endpoint.com" + # context client options should not change + assert magics.context.bigquery_client_options.api_endpoint is None + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_with_bigquery_api_endpoint_context_dict(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + 
magics.context._connection = None + magics.context.bigquery_client_options = {} + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with run_query_patch as run_query_mock: + ip.run_cell_magic( + "bigquery", + "--bigquery_api_endpoint=https://bigquery_api.endpoint.com", + "SELECT 17 as num", + ) + + connection_used = run_query_mock.call_args_list[0][0][0]._connection + assert connection_used.API_BASE_URL == "https://bigquery_api.endpoint.com" + # context client options should not change + assert magics.context.bigquery_client_options == {} + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_with_bqstorage_api_endpoint(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._connection = None + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with run_query_patch as run_query_mock: + ip.run_cell_magic( + "bigquery", + "--bqstorage_api_endpoint=https://bqstorage_api.endpoint.com", + "SELECT 17 as num", + ) + + client_used = run_query_mock.mock_calls[1][2]["bqstorage_client"] + assert client_used._transport._host == "https://bqstorage_api.endpoint.com" + # context client options should not change + assert magics.context.bqstorage_client_options.api_endpoint is None + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_with_bqstorage_api_endpoint_context_dict(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._connection = None + magics.context.bqstorage_client_options = {} + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with run_query_patch as run_query_mock: + ip.run_cell_magic( + "bigquery", + "--bqstorage_api_endpoint=https://bqstorage_api.endpoint.com", + "SELECT 17 as num", + ) + + client_used = run_query_mock.mock_calls[1][2]["bqstorage_client"] + assert client_used._transport._host == "https://bqstorage_api.endpoint.com" + # context client options should not change + assert magics.context.bqstorage_client_options == {} + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_with_multiple_options(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + with run_query_patch as run_query_mock, default_patch: + ip.run_cell_magic( + "bigquery", + "--project=specific-project --use_legacy_sql --maximum_bytes_billed 1024", + "SELECT 17 as num", + ) + + args, kwargs = run_query_mock.call_args + client_used = args[0] + assert client_used.project == "specific-project" + + job_config_used = kwargs["job_config"] + assert job_config_used.use_legacy_sql + assert job_config_used.maximum_bytes_billed == 1024 + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_with_string_params(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + 
google.auth.credentials.Credentials, instance=True
+    )
+
+    ipython_ns_cleanup.append((ip, "params_string_df"))
+
+    sql = "SELECT @num AS num"
+    result = pandas.DataFrame([17], columns=["num"])
+
+    assert "params_string_df" not in ip.user_ns
+
+    run_query_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics._run_query", autospec=True
+    )
+    query_job_mock = mock.create_autospec(
+        google.cloud.bigquery.job.QueryJob, instance=True
+    )
+    query_job_mock.to_dataframe.return_value = result
+
+    with run_query_patch as run_query_mock:
+        run_query_mock.return_value = query_job_mock
+
+        ip.run_cell_magic("bigquery", "params_string_df --params='{\"num\":17}'", sql)
+
+        run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=17), mock.ANY)
+
+    assert "params_string_df" in ip.user_ns  # verify that the variable exists
+    df = ip.user_ns["params_string_df"]
+    assert len(df) == len(result)  # verify row count
+    assert list(df) == list(result)  # verify column names
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
+def test_bigquery_magic_with_dict_params(ipython_ns_cleanup):
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context.credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    ipython_ns_cleanup.append((ip, "params_dict_df"))
+
+    sql = "SELECT @num AS num, @tricky_value as tricky_value"
+    result = pandas.DataFrame(
+        [(False, '--params "value"')], columns=["valid", "tricky_value"]
+    )
+
+    assert "params_dict_df" not in ip.user_ns
+
+    run_query_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics._run_query", autospec=True
+    )
+    query_job_mock = mock.create_autospec(
+        google.cloud.bigquery.job.QueryJob, instance=True
+    )
+    query_job_mock.to_dataframe.return_value = result
+    with run_query_patch as run_query_mock:
+        run_query_mock.return_value = query_job_mock
+
+        params = {"valid": False, "tricky_value": '--params "value"'}
+        # Insert dictionary into user namespace so that it can be expanded
+        ip.user_ns["params"] = params
+        ip.run_cell_magic("bigquery", "params_dict_df --params $params", sql)
+
+        run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=17), mock.ANY)
+
+    assert "params_dict_df" in ip.user_ns  # verify that the variable exists
+    df = ip.user_ns["params_dict_df"]
+    assert len(df) == len(result)  # verify row count
+    assert list(df) == list(result)  # verify column names
+
+    assert not df["valid"][0]
+    assert df["tricky_value"][0] == '--params "value"'
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
+def test_bigquery_magic_with_dict_params_nonexisting():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context.credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    sql = "SELECT @foo AS foo"
+
+    with pytest.raises(NameError, match=r".*undefined variable.*unknown_name.*"):
+        ip.run_cell_magic("bigquery", "params_dict_df --params $unknown_name", sql)
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
+def test_bigquery_magic_with_dict_params_incorrect_syntax():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context.credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    sql = "SELECT @foo
AS foo" + + with pytest.raises(SyntaxError, match=r".*--params.*"): + cell_magic_args = "params_dict_df --params {'foo': 1; 'bar': 2}" + ip.run_cell_magic("bigquery", cell_magic_args, sql) + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_with_dict_params_duplicate(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + sql = "SELECT @foo AS foo" + + with pytest.raises(ValueError, match=r"Duplicate --params option\."): + cell_magic_args = ( + "params_dict_df --params {'foo': 1} --verbose --params {'bar': 2} " + ) + ip.run_cell_magic("bigquery", cell_magic_args, sql) + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_with_option_value_incorrect(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + sql = "SELECT @foo AS foo" + + with pytest.raises(ValueError, match=r".*invalid literal.*\[PLENTY!\].*"): + cell_magic_args = "params_dict_df --max_results [PLENTY!]" + ip.run_cell_magic("bigquery", cell_magic_args, sql) + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_with_dict_params_negative_value(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + ipython_ns_cleanup.append((ip, "params_dict_df")) + + sql = "SELECT @num AS num" + result = pandas.DataFrame([-17], columns=["num"]) + + assert "params_dict_df" not in ip.user_ns + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + query_job_mock.to_dataframe.return_value = result + with run_query_patch as run_query_mock: + run_query_mock.return_value = query_job_mock + + params = {"num": -17} + # Insert dictionary into user namespace so that it can be expanded + ip.user_ns["params"] = params + ip.run_cell_magic("bigquery", "params_dict_df --params $params", sql) + + run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=-17), mock.ANY) + + assert "params_dict_df" in ip.user_ns # verify that the variable exists + df = ip.user_ns["params_dict_df"] + assert len(df) == len(result) # verify row count + assert list(df) == list(result) # verify column names + assert df["num"][0] == -17 + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_with_dict_params_array_value(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + ipython_ns_cleanup.append((ip, "params_dict_df")) + + sql = "SELECT @num AS num" + result = pandas.DataFrame(["foo bar", "baz quux"], columns=["array_data"]) + + assert "params_dict_df" not in ip.user_ns + + run_query_patch = mock.patch( + 
"google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + query_job_mock.to_dataframe.return_value = result + with run_query_patch as run_query_mock: + run_query_mock.return_value = query_job_mock + + params = {"array_data": ["foo bar", "baz quux"]} + # Insert dictionary into user namespace so that it can be expanded + ip.user_ns["params"] = params + ip.run_cell_magic("bigquery", "params_dict_df --params $params", sql) + + run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=-17), mock.ANY) + + assert "params_dict_df" in ip.user_ns # verify that the variable exists + df = ip.user_ns["params_dict_df"] + assert len(df) == len(result) # verify row count + assert list(df) == list(result) # verify column names + assert list(df["array_data"]) == ["foo bar", "baz quux"] + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_with_dict_params_tuple_value(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + ipython_ns_cleanup.append((ip, "params_dict_df")) + + sql = "SELECT @num AS num" + result = pandas.DataFrame(["foo bar", "baz quux"], columns=["array_data"]) + + assert "params_dict_df" not in ip.user_ns + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + query_job_mock.to_dataframe.return_value = result + with run_query_patch as run_query_mock: + run_query_mock.return_value = query_job_mock + + params = {"array_data": ("foo bar", "baz quux")} + # Insert dictionary into user namespace so that it can be expanded + ip.user_ns["params"] = params + ip.run_cell_magic("bigquery", "params_dict_df --params $params", sql) + + run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=-17), mock.ANY) + + assert "params_dict_df" in ip.user_ns # verify that the variable exists + df = ip.user_ns["params_dict_df"] + assert len(df) == len(result) # verify row count + assert list(df) == list(result) # verify column names + assert list(df["array_data"]) == ["foo bar", "baz quux"] + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_with_improperly_formatted_params(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + sql = "SELECT @num AS num" + + with pytest.raises(SyntaxError): + ip.run_cell_magic("bigquery", "--params {17}", sql) + + +@pytest.mark.parametrize( + "raw_sql", ("SELECT answer AS 42", " \t SELECT answer AS 42 \t ") +) +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_valid_query_in_existing_variable(ipython_ns_cleanup, raw_sql): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + ipython_ns_cleanup.append((ip, "custom_query")) + ipython_ns_cleanup.append((ip, "query_results_df")) + + 
run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + mock_result = pandas.DataFrame([42], columns=["answer"]) + query_job_mock.to_dataframe.return_value = mock_result + + ip.user_ns["custom_query"] = raw_sql + cell_body = "$custom_query" # Referring to an existing variable name (custom_query) + assert "query_results_df" not in ip.user_ns + + with run_query_patch as run_query_mock: + run_query_mock.return_value = query_job_mock + + ip.run_cell_magic("bigquery", "query_results_df", cell_body) + + run_query_mock.assert_called_once_with(mock.ANY, raw_sql, mock.ANY) + + assert "query_results_df" in ip.user_ns # verify that the variable exists + df = ip.user_ns["query_results_df"] + assert len(df) == len(mock_result) # verify row count + assert list(df) == list(mock_result) # verify column names + assert list(df["answer"]) == [42] + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_nonexisting_query_variable(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + + ip.user_ns.pop("custom_query", None) # Make sure the variable does NOT exist. + cell_body = "$custom_query" # Referring to a non-existing variable name. + + with pytest.raises( + NameError, match=r".*custom_query does not exist.*" + ), run_query_patch as run_query_mock: + ip.run_cell_magic("bigquery", "", cell_body) + + run_query_mock.assert_not_called() + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_empty_query_variable_name(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + cell_body = "$" # Not referring to any variable (name omitted). + + with pytest.raises( + NameError, match=r"(?i).*missing query variable name.*" + ), run_query_patch as run_query_mock: + ip.run_cell_magic("bigquery", "", cell_body) + + run_query_mock.assert_not_called() + + +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_bigquery_magic_query_variable_non_string(ipython_ns_cleanup): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics.magics._run_query", autospec=True + ) + + ipython_ns_cleanup.append((ip, "custom_query")) + + ip.user_ns["custom_query"] = object() + cell_body = "$custom_query" # Referring to a non-string variable. 
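+    # The magic only accepts str/bytes query bodies, so a bare object() stored
+    # under the referenced name should be rejected before _run_query ever runs.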
+
+    with pytest.raises(
+        TypeError, match=r".*must be a string or a bytes-like.*"
+    ), run_query_patch as run_query_mock:
+        ip.run_cell_magic("bigquery", "", cell_body)
+
+    run_query_mock.assert_not_called()
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
+def test_bigquery_magic_query_variable_not_identifier():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context.credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    cell_body = "$123foo"  # 123foo is not a valid Python identifier
+
+    with io.capture_output() as captured_io:
+        ip.run_cell_magic("bigquery", "", cell_body)
+
+    # If "$" prefixes a string that is not a Python identifier, we do not treat such
+    # cell_body as a variable reference and just treat it as any other cell body input.
+    # If at the same time the cell body does not contain any whitespace, it is
+    # considered a table name, thus we expect an error that the table ID is not valid.
+    output = captured_io.stderr
+    assert "ERROR:" in output
+    assert "must be a fully-qualified ID" in output
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
+def test_bigquery_magic_with_invalid_multiple_option_values():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context.credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    sql = "SELECT @foo AS foo"
+
+    exc_pattern = r".*[Uu]nrecognized input.*option values correct\?.*567.*"
+
+    with pytest.raises(ValueError, match=exc_pattern):
+        cell_magic_args = "params_dict_df --max_results 10 567"
+        ip.run_cell_magic("bigquery", cell_magic_args, sql)
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+def test_bigquery_magic_omits_tracebacks_from_error_message():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+
+    credentials_mock = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+    default_patch = mock.patch(
+        "google.auth.default", return_value=(credentials_mock, "general-project")
+    )
+
+    run_query_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics._run_query",
+        autospec=True,
+        side_effect=exceptions.BadRequest("Syntax error in SQL query"),
+    )
+
+    with run_query_patch, default_patch, io.capture_output() as captured_io:
+        ip.run_cell_magic("bigquery", "", "SELECT foo FROM WHERE LIMIT bar")
+
+    output = captured_io.stderr
+    assert "400 Syntax error in SQL query" in output
+    assert "Traceback (most recent call last)" not in output
+    assert "Syntax error" not in captured_io.stdout
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+def test_bigquery_magic_w_destination_table_invalid_format():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context._project = None
+
+    credentials_mock = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+    default_patch = mock.patch(
+        "google.auth.default", return_value=(credentials_mock, "general-project")
+    )
+
+    client_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True
+    )
+
+    with client_patch, default_patch, pytest.raises(ValueError) as exc_context:
+        ip.run_cell_magic(
+            "bigquery", "--destination_table dataset", "SELECT foo FROM WHERE LIMIT bar"
+        )
+    error_msg = str(exc_context.value)
+    assert (
+        "--destination_table should be in a "
+        "<dataset_id>.<table_id> format." in error_msg
+    )
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+def test_bigquery_magic_w_destination_table():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context.credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    create_dataset_if_necessary_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics._create_dataset_if_necessary",
+        autospec=True,
+    )
+
+    run_query_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics._run_query", autospec=True
+    )
+
+    with create_dataset_if_necessary_patch, run_query_patch as run_query_mock:
+        ip.run_cell_magic(
+            "bigquery",
+            "--destination_table dataset_id.table_id",
+            "SELECT foo FROM WHERE LIMIT bar",
+        )
+
+        job_config_used = run_query_mock.call_args_list[0][1]["job_config"]
+        assert job_config_used.allow_large_results is True
+        assert job_config_used.create_disposition == "CREATE_IF_NEEDED"
+        assert job_config_used.write_disposition == "WRITE_TRUNCATE"
+        assert job_config_used.destination.dataset_id == "dataset_id"
+        assert job_config_used.destination.table_id == "table_id"
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+def test_bigquery_magic_create_dataset_fails():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context.credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    create_dataset_if_necessary_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics._create_dataset_if_necessary",
+        autospec=True,
+        side_effect=OSError,
+    )
+    close_transports_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics._close_transports",
+        autospec=True,
+    )
+
+    with pytest.raises(
+        OSError
+    ), create_dataset_if_necessary_patch, close_transports_patch as close_transports:
+        ip.run_cell_magic(
+            "bigquery",
+            "--destination_table dataset_id.table_id",
+            "SELECT foo FROM WHERE LIMIT bar",
+        )
+
+    assert close_transports.called
+
+
+@pytest.mark.usefixtures("ipython_interactive")
+def test_bigquery_magic_with_location():
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    magics.context.credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    run_query_patch = mock.patch(
+        "google.cloud.bigquery.magics.magics._run_query", autospec=True
+    )
+    with run_query_patch as run_query_mock:
+        ip.run_cell_magic("bigquery", "--location=us-east1", "SELECT 17 AS num")
+
+        client_used = run_query_mock.call_args_list[0][0][0]
+        assert client_used.location == "us-east1"

From 49089ae47c1c3f1a4d46732cb046118b8e417877 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Wed, 10 Apr 2024 15:57:41 -0500
Subject: [PATCH 12/20] include google-cloud-testutils in unittests

---
 owlbot.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/owlbot.py b/owlbot.py
index b274d80..2bcf982 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -39,6 +39,7 @@
     cov_level=100,
     unit_test_extras=extras,
     unit_test_extras_by_python=extras_by_python,
+    unit_test_external_dependencies=["google-cloud-testutils"],
     system_test_extras=extras,
     intersphinx_dependencies={
         "pandas": "https://pandas.pydata.org/pandas-docs/stable/",
@@ -84,6 +85,6 @@
 # Final cleanup
 # ----------------------------------------------------------------------------
 
-s.shell.run(["nox", "-s", "blacken"], hide_output=False)
+s.shell.run(["nox", "-s", "format"], hide_output=False) for noxfile in REPO_ROOT.glob("samples/**/noxfile.py"): s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) \ No newline at end of file From 50b2266e52a99b6a8fe9c59b07d36c25c36a9fe4 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 10 Apr 2024 20:59:33 +0000 Subject: [PATCH 13/20] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- bigquery_magics/__init__.py | 1 - bigquery_magics/line_arg_parser/__init__.py | 6 ++---- bigquery_magics/line_arg_parser/lexer.py | 7 ++----- bigquery_magics/line_arg_parser/parser.py | 10 ++++++---- bigquery_magics/magics.py | 11 ++++------- docs/conf.py | 2 +- noxfile.py | 4 +++- setup.py | 1 - 8 files changed, 18 insertions(+), 24 deletions(-) diff --git a/bigquery_magics/__init__.py b/bigquery_magics/__init__.py index d228a35..704bb2a 100644 --- a/bigquery_magics/__init__.py +++ b/bigquery_magics/__init__.py @@ -14,7 +14,6 @@ from google.cloud.bigquery.magics.magics import context - # For backwards compatibility we need to make the context available in the path # google.cloud.bigquery.magics.context __all__ = ("context",) diff --git a/bigquery_magics/line_arg_parser/__init__.py b/bigquery_magics/line_arg_parser/__init__.py index 9471446..bcaab2c 100644 --- a/bigquery_magics/line_arg_parser/__init__.py +++ b/bigquery_magics/line_arg_parser/__init__.py @@ -12,17 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from google.cloud.bigquery.magics.line_arg_parser.exceptions import ParseError from google.cloud.bigquery.magics.line_arg_parser.exceptions import ( DuplicateQueryParamsError, + ParseError, QueryParamsParseError, ) -from google.cloud.bigquery.magics.line_arg_parser.lexer import Lexer -from google.cloud.bigquery.magics.line_arg_parser.lexer import TokenType +from google.cloud.bigquery.magics.line_arg_parser.lexer import Lexer, TokenType from google.cloud.bigquery.magics.line_arg_parser.parser import Parser from google.cloud.bigquery.magics.line_arg_parser.visitors import QueryParamsExtractor - __all__ = ( "DuplicateQueryParamsError", "Lexer", diff --git a/bigquery_magics/line_arg_parser/lexer.py b/bigquery_magics/line_arg_parser/lexer.py index 71b287d..6e8b4cc 100644 --- a/bigquery_magics/line_arg_parser/lexer.py +++ b/bigquery_magics/line_arg_parser/lexer.py @@ -12,14 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import namedtuple -from collections import OrderedDict +from collections import OrderedDict, namedtuple +import enum import itertools import re -import enum - - Token = namedtuple("Token", ("type_", "lexeme", "pos")) StateTransition = namedtuple("StateTransition", ("new_state", "total_offset")) diff --git a/bigquery_magics/line_arg_parser/parser.py b/bigquery_magics/line_arg_parser/parser.py index b9da20c..18c0c01 100644 --- a/bigquery_magics/line_arg_parser/parser.py +++ b/bigquery_magics/line_arg_parser/parser.py @@ -12,10 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from google.cloud.bigquery.magics.line_arg_parser import DuplicateQueryParamsError -from google.cloud.bigquery.magics.line_arg_parser import ParseError -from google.cloud.bigquery.magics.line_arg_parser import QueryParamsParseError -from google.cloud.bigquery.magics.line_arg_parser import TokenType +from google.cloud.bigquery.magics.line_arg_parser import ( + DuplicateQueryParamsError, + ParseError, + QueryParamsParseError, + TokenType, +) class ParseNode(object): diff --git a/bigquery_magics/magics.py b/bigquery_magics/magics.py index 2a3583c..7338062 100644 --- a/bigquery_magics/magics.py +++ b/bigquery_magics/magics.py @@ -82,14 +82,14 @@ from __future__ import print_function -import re import ast +from concurrent import futures import copy import functools +import re import sys import time import warnings -from concurrent import futures try: import IPython # type: ignore @@ -98,18 +98,15 @@ except ImportError: # pragma: NO COVER raise ImportError("This module can only be loaded in IPython.") -from google.api_core import client_info -from google.api_core import client_options +from google.api_core import client_info, client_options from google.api_core.exceptions import NotFound import google.auth # type: ignore from google.cloud import bigquery +from google.cloud.bigquery import _versions_helpers, exceptions import google.cloud.bigquery.dataset -from google.cloud.bigquery import _versions_helpers -from google.cloud.bigquery import exceptions from google.cloud.bigquery.dbapi import _helpers from google.cloud.bigquery.magics import line_arg_parser as lap - IPYTHON_USER_AGENT = "ipython-{}".format(IPython.__version__) diff --git a/docs/conf.py b/docs/conf.py index 371bcff..d973f7d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -24,9 +24,9 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the diff --git a/noxfile.py b/noxfile.py index 08ee214..b8e5b01 100644 --- a/noxfile.py +++ b/noxfile.py @@ -42,7 +42,9 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "google-cloud-testutils", +] UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] UNIT_TEST_DEPENDENCIES: List[str] = [] UNIT_TEST_EXTRAS: List[str] = [ diff --git a/setup.py b/setup.py index 9fbc91e..848ee6c 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,6 @@ import setuptools - # Package metadata. name = "google-cloud-bigquery" From c61d4665712b9bae7fe61a480dbb960f3e849a07 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 10 Apr 2024 16:05:40 -0500 Subject: [PATCH 14/20] use bqstorage extras --- owlbot.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/owlbot.py b/owlbot.py index 2bcf982..04be095 100644 --- a/owlbot.py +++ b/owlbot.py @@ -30,17 +30,18 @@ extras_by_python = { # Use a middle version of Python to test when no extras are installed. 
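+    # An empty extras list exercises the code paths that must keep working when
+    # optional extras (here "bqstorage") are not installed.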
- "3.8": [] + "3.11": [] } -extras = ["tqdm"] +extras = ["bqstorage"] templated_files = common.py_library( unit_test_python_versions=["3.7", "3.8", "3.11", "3.12"], - system_test_python_versions=["3.8", "3.12"], + system_test_python_versions=["3.8", "3.11", "3.12"], cov_level=100, unit_test_extras=extras, unit_test_extras_by_python=extras_by_python, unit_test_external_dependencies=["google-cloud-testutils"], system_test_extras=extras, + system_test_extras_by_python=extras_by_python, intersphinx_dependencies={ "pandas": "https://pandas.pydata.org/pandas-docs/stable/", "pydata-google-auth": "https://pydata-google-auth.readthedocs.io/en/latest/", From 2c0443a6138648848382fb83c031b23014ffb67d Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 10 Apr 2024 21:13:44 +0000 Subject: [PATCH 15/20] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- CONTRIBUTING.rst | 2 +- noxfile.py | 12 +++++++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 582c599..8ccc1af 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -148,7 +148,7 @@ Running System Tests .. note:: - System tests are only configured to run under Python 3.8 and 3.12. + System tests are only configured to run under Python 3.8, 3.11 and 3.12. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/noxfile.py b/noxfile.py index b8e5b01..af754c4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -48,13 +48,13 @@ UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] UNIT_TEST_DEPENDENCIES: List[str] = [] UNIT_TEST_EXTRAS: List[str] = [ - "tqdm", + "bqstorage", ] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = { - "3.8": [], + "3.11": [], } -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.12"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.11", "3.12"] SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", @@ -64,9 +64,11 @@ SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] SYSTEM_TEST_DEPENDENCIES: List[str] = [] SYSTEM_TEST_EXTRAS: List[str] = [ - "tqdm", + "bqstorage", ] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = { + "3.11": [], +} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() From 5f4d1ceb861c56ba3b87d3a04b18b43c634595d5 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 10 Apr 2024 17:03:49 -0500 Subject: [PATCH 16/20] use extras by python for everything --- owlbot.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/owlbot.py b/owlbot.py index 04be095..ca0d20e 100644 --- a/owlbot.py +++ b/owlbot.py @@ -28,19 +28,22 @@ # Add templated files # ---------------------------------------------------------------------------- +extras = ["bqstorage"] extras_by_python = { + "3.7": extras, + "3.8": extras, + "3.9": extras, + "3.10": extras, # Use a middle version of Python to test when no extras are installed. 
- "3.11": [] + "3.11": [], + "3.12": extras, } -extras = ["bqstorage"] templated_files = common.py_library( unit_test_python_versions=["3.7", "3.8", "3.11", "3.12"], system_test_python_versions=["3.8", "3.11", "3.12"], cov_level=100, - unit_test_extras=extras, unit_test_extras_by_python=extras_by_python, unit_test_external_dependencies=["google-cloud-testutils"], - system_test_extras=extras, system_test_extras_by_python=extras_by_python, intersphinx_dependencies={ "pandas": "https://pandas.pydata.org/pandas-docs/stable/", From c68db281664acbe428a1344ad25694424afd48a9 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 10 Apr 2024 22:05:52 +0000 Subject: [PATCH 17/20] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- noxfile.py | 38 ++++++++++++++++++++++++++++++++------ 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/noxfile.py b/noxfile.py index af754c4..f5dd689 100644 --- a/noxfile.py +++ b/noxfile.py @@ -47,11 +47,24 @@ ] UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [ - "bqstorage", -] +UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = { + "3.7": [ + "bqstorage", + ], + "3.8": [ + "bqstorage", + ], + "3.9": [ + "bqstorage", + ], + "3.10": [ + "bqstorage", + ], "3.11": [], + "3.12": [ + "bqstorage", + ], } SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.11", "3.12"] @@ -63,11 +76,24 @@ SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [ - "bqstorage", -] +SYSTEM_TEST_EXTRAS: List[str] = [] SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = { + "3.7": [ + "bqstorage", + ], + "3.8": [ + "bqstorage", + ], + "3.9": [ + "bqstorage", + ], + "3.10": [ + "bqstorage", + ], "3.11": [], + "3.12": [ + "bqstorage", + ], } CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() From b23de2a408dc48dfc936cc59dd915e4654dae5a5 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 10 Apr 2024 15:55:45 -0500 Subject: [PATCH 18/20] update tests to use bigquery_magics module --- .kokoro/presubmit/snippets-3.12.cfg | 7 - .../{snippets-3.8.cfg => system-3.11.cfg} | 4 +- bigquery_magics/__init__.py | 26 +- bigquery_magics/_versions_helpers.py | 91 +++ bigquery_magics/{magics.py => bigquery.py} | 219 +------ bigquery_magics/config.py | 187 ++++++ bigquery_magics/line_arg_parser/__init__.py | 12 +- bigquery_magics/line_arg_parser/lexer.py | 7 +- bigquery_magics/line_arg_parser/parser.py | 148 +++-- bigquery_magics/version.py | 2 +- docs/conf.py | 2 +- noxfile.py | 50 +- owlbot.py | 116 ++-- setup.py | 22 +- testing/constraints-3.7.txt | 4 +- tests/system/test_bigquery.py | 5 +- tests/unit/line_arg_parser/test_parser.py | 21 +- tests/unit/test__versions_helpers.py | 108 ++++ tests/unit/test_bigquery.py | 580 ++++++++---------- 19 files changed, 900 insertions(+), 711 deletions(-) delete mode 100644 .kokoro/presubmit/snippets-3.12.cfg rename .kokoro/presubmit/{snippets-3.8.cfg => system-3.11.cfg} (80%) create mode 100644 bigquery_magics/_versions_helpers.py rename bigquery_magics/{magics.py => bigquery.py} (76%) create mode 100644 bigquery_magics/config.py create mode 100644 tests/unit/test__versions_helpers.py diff --git a/.kokoro/presubmit/snippets-3.12.cfg 
b/.kokoro/presubmit/snippets-3.12.cfg
deleted file mode 100644
index 1381e83..0000000
--- a/.kokoro/presubmit/snippets-3.12.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Only run this nox session.
-env_vars: {
-  key: "NOX_SESSION"
-  value: "snippets-3.12"
-}
diff --git a/.kokoro/presubmit/snippets-3.8.cfg b/.kokoro/presubmit/system-3.11.cfg
similarity index 80%
rename from .kokoro/presubmit/snippets-3.8.cfg
rename to .kokoro/presubmit/system-3.11.cfg
index 840d9e7..90dc133 100644
--- a/.kokoro/presubmit/snippets-3.8.cfg
+++ b/.kokoro/presubmit/system-3.11.cfg
@@ -3,5 +3,5 @@
 # Only run this nox session.
 env_vars: {
   key: "NOX_SESSION"
-  value: "snippets-3.8"
-}
+  value: "system-3.11"
+}
\ No newline at end of file
diff --git a/bigquery_magics/__init__.py b/bigquery_magics/__init__.py
index d228a35..627ca98 100644
--- a/bigquery_magics/__init__.py
+++ b/bigquery_magics/__init__.py
@@ -12,9 +12,27 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from google.cloud.bigquery.magics.magics import context
+import bigquery_magics.config
+import bigquery_magics.version
 
-# For backwards compatibility we need to make the context available in the path
-# google.cloud.bigquery.magics.context
-__all__ = ("context",)
+context = bigquery_magics.config.context
+__version__ = bigquery_magics.version.__version__
+
+
+def load_ipython_extension(ipython):
+    """Called by IPython when this module is loaded as an IPython extension."""
+    # Import here to avoid circular imports.
+    from bigquery_magics.bigquery import _cell_magic
+
+    ipython.register_magic_function(
+        _cell_magic, magic_kind="cell", magic_name="bigquery"
+    )
+
+
+__all__ = (
+    # For backwards compatibility we need to make the context available in
+    # the path google.cloud.bigquery.magics.context.
+    "context",
+    "__version__",
+    "load_ipython_extension",
+)
diff --git a/bigquery_magics/_versions_helpers.py b/bigquery_magics/_versions_helpers.py
new file mode 100644
index 0000000..192011a
--- /dev/null
+++ b/bigquery_magics/_versions_helpers.py
@@ -0,0 +1,91 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared helper functions for verifying versions of installed modules."""
+
+from typing import Any
+
+from google.cloud.bigquery import exceptions
+import packaging.version
+
+_MIN_BQ_STORAGE_VERSION = packaging.version.Version("2.0.0")
+
+
+class BQStorageVersions:
+    """Version comparisons for the google-cloud-bigquery-storage package."""
+
+    def __init__(self):
+        self._installed_version = None
+
+    @property
+    def installed_version(self) -> packaging.version.Version:
+        """Return the parsed version of google-cloud-bigquery-storage."""
+        if self._installed_version is None:
+            from google.cloud import bigquery_storage
+
+            self._installed_version = packaging.version.parse(
+                # Use 0.0.0, since it is earlier than any released version.
+ # Legacy versions also have the same property, but + # creating a LegacyVersion has been deprecated. + # https://github.com/pypa/packaging/issues/321 + getattr(bigquery_storage, "__version__", "0.0.0") + ) + + return self._installed_version # type: ignore + + def try_import(self, raise_if_error: bool = False) -> Any: + """Tries to import the bigquery_storage module, and returns results + accordingly. It also verifies the module version is recent enough. + + If the import succeeds, returns the ``bigquery_storage`` module. + + If the import fails, + returns ``None`` when ``raise_if_error == False``, + raises Error when ``raise_if_error == True``. + + Returns: + The ``bigquery_storage`` module or ``None``. + + Raises: + exceptions.BigQueryStorageNotFoundError: + If google-cloud-bigquery-storage is not installed + exceptions.LegacyBigQueryStorageError: + If google-cloud-bigquery-storage package is outdated + """ + try: + from google.cloud import bigquery_storage # type: ignore + except ImportError: + if raise_if_error: + msg = ( + "Package google-cloud-bigquery-storage not found. " + "Install google-cloud-bigquery-storage version >= " + f"{_MIN_BQ_STORAGE_VERSION}." + ) + raise exceptions.BigQueryStorageNotFoundError(msg) + return None + + if self.installed_version < _MIN_BQ_STORAGE_VERSION: + if raise_if_error: + msg = ( + "Dependency google-cloud-bigquery-storage is outdated, " + f"please upgrade it to version >= {_MIN_BQ_STORAGE_VERSION} " + f"(version found: {self.installed_version})." + ) + raise exceptions.LegacyBigQueryStorageError(msg) + return None + + return bigquery_storage + + +BQ_STORAGE_VERSIONS = BQStorageVersions() diff --git a/bigquery_magics/magics.py b/bigquery_magics/bigquery.py similarity index 76% rename from bigquery_magics/magics.py rename to bigquery_magics/bigquery.py index 2a3583c..839e104 100644 --- a/bigquery_magics/magics.py +++ b/bigquery_magics/bigquery.py @@ -82,205 +82,35 @@ from __future__ import print_function -import re import ast +from concurrent import futures import copy import functools +import re import sys import time import warnings -from concurrent import futures - -try: - import IPython # type: ignore - from IPython import display # type: ignore - from IPython.core import magic_arguments # type: ignore -except ImportError: # pragma: NO COVER - raise ImportError("This module can only be loaded in IPython.") +import IPython # type: ignore +from IPython import display # type: ignore +from IPython.core import magic_arguments # type: ignore from google.api_core import client_info -from google.api_core import client_options from google.api_core.exceptions import NotFound -import google.auth # type: ignore from google.cloud import bigquery -import google.cloud.bigquery.dataset -from google.cloud.bigquery import _versions_helpers from google.cloud.bigquery import exceptions from google.cloud.bigquery.dbapi import _helpers -from google.cloud.bigquery.magics import line_arg_parser as lap - - -IPYTHON_USER_AGENT = "ipython-{}".format(IPython.__version__) - - -class Context(object): - """Storage for objects to be used throughout an IPython notebook session. - A Context object is initialized when the ``magics`` module is imported, - and can be found at ``google.cloud.bigquery.magics.context``. 
- """ +from bigquery_magics import line_arg_parser as lap +import bigquery_magics.config +import bigquery_magics.line_arg_parser.exceptions - def __init__(self): - self._credentials = None - self._project = None - self._connection = None - self._default_query_job_config = bigquery.QueryJobConfig() - self._bigquery_client_options = client_options.ClientOptions() - self._bqstorage_client_options = client_options.ClientOptions() - self._progress_bar_type = "tqdm_notebook" - - @property - def credentials(self): - """google.auth.credentials.Credentials: Credentials to use for queries - performed through IPython magics. - - Note: - These credentials do not need to be explicitly defined if you are - using Application Default Credentials. If you are not using - Application Default Credentials, manually construct a - :class:`google.auth.credentials.Credentials` object and set it as - the context credentials as demonstrated in the example below. See - `auth docs`_ for more information on obtaining credentials. - - Example: - Manually setting the context credentials: - - >>> from google.cloud.bigquery import magics - >>> from google.oauth2 import service_account - >>> credentials = (service_account - ... .Credentials.from_service_account_file( - ... '/path/to/key.json')) - >>> magics.context.credentials = credentials - - - .. _auth docs: http://google-auth.readthedocs.io - /en/latest/user-guide.html#obtaining-credentials - """ - if self._credentials is None: - self._credentials, _ = google.auth.default() - return self._credentials - - @credentials.setter - def credentials(self, value): - self._credentials = value - - @property - def project(self): - """str: Default project to use for queries performed through IPython - magics. - - Note: - The project does not need to be explicitly defined if you have an - environment default project set. If you do not have a default - project set in your environment, manually assign the project as - demonstrated in the example below. - - Example: - Manually setting the context project: - - >>> from google.cloud.bigquery import magics - >>> magics.context.project = 'my-project' - """ - if self._project is None: - _, self._project = google.auth.default() - return self._project - - @project.setter - def project(self, value): - self._project = value - - @property - def bigquery_client_options(self): - """google.api_core.client_options.ClientOptions: client options to be - used through IPython magics. - - Note:: - The client options do not need to be explicitly defined if no - special network connections are required. Normally you would be - using the https://bigquery.googleapis.com/ end point. - - Example: - Manually setting the endpoint: - - >>> from google.cloud.bigquery import magics - >>> client_options = {} - >>> client_options['api_endpoint'] = "https://some.special.url" - >>> magics.context.bigquery_client_options = client_options - """ - return self._bigquery_client_options - - @bigquery_client_options.setter - def bigquery_client_options(self, value): - self._bigquery_client_options = value - - @property - def bqstorage_client_options(self): - """google.api_core.client_options.ClientOptions: client options to be - used through IPython magics for the storage client. - - Note:: - The client options do not need to be explicitly defined if no - special network connections are required. Normally you would be - using the https://bigquerystorage.googleapis.com/ end point. 
-
-        Example:
-            Manually setting the endpoint:
-
-            >>> from google.cloud.bigquery import magics
-            >>> client_options = {}
-            >>> client_options['api_endpoint'] = "https://some.special.url"
-            >>> magics.context.bqstorage_client_options = client_options
-        """
-        return self._bqstorage_client_options
-
-    @bqstorage_client_options.setter
-    def bqstorage_client_options(self, value):
-        self._bqstorage_client_options = value
-
-    @property
-    def default_query_job_config(self):
-        """google.cloud.bigquery.job.QueryJobConfig: Default job
-        configuration for queries.
-
-        The context's :class:`~google.cloud.bigquery.job.QueryJobConfig` is
-        used for queries. Some properties can be overridden with arguments to
-        the magics.
-
-        Example:
-            Manually setting the default value for ``maximum_bytes_billed``
-            to 100 MB:
-
-            >>> from google.cloud.bigquery import magics
-            >>> magics.context.default_query_job_config.maximum_bytes_billed = 100000000
-        """
-        return self._default_query_job_config
-
-    @default_query_job_config.setter
-    def default_query_job_config(self, value):
-        self._default_query_job_config = value
-
-    @property
-    def progress_bar_type(self):
-        """str: Default progress bar type to use to display progress bar while
-        executing queries through IPython magics.
-
-        Note::
-            Install the ``tqdm`` package to use this feature.
-
-        Example:
-            Manually setting the progress_bar_type:
-
-            >>> from google.cloud.bigquery import magics
-            >>> magics.context.progress_bar_type = "tqdm_notebook"
-        """
-        return self._progress_bar_type
+from bigquery_magics import line_arg_parser as lap
+import bigquery_magics.config
+import bigquery_magics.line_arg_parser.exceptions
 
-    @progress_bar_type.setter
-    def progress_bar_type(self, value):
-        self._progress_bar_type = value
-
-
-context = Context()
+try:
+    from google.cloud import bigquery_storage  # type: ignore
+except ImportError:
+    bigquery_storage = None
 
+IPYTHON_USER_AGENT = "ipython-{}".format(IPython.__version__)
+context = bigquery_magics.config.context
 
 
 def _handle_error(error, destination_var=None):
@@ -288,7 +118,7 @@ def _handle_error(error, destination_var=None):
 
     Args:
         error (Exception):
-            An exception that ocurred during the query execution.
+            An exception that occurred during the query execution.
         destination_var (Optional[str]):
             The name of the IPython session variable to store the query job.
     """
@@ -508,6 +338,15 @@ def _create_dataset_if_necessary(client, dataset_id):
         "Defaults to use tqdm_notebook. Install the ``tqdm`` package to use this feature."
     ),
 )
+@magic_arguments.argument(
+    "--location",
+    type=str,
+    default=None,
+    help=(
+        "Set the location to execute the query. "
+        "Defaults to the location set in the query settings in the console."
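+        # The parsed value is forwarded to bigquery.Client(location=...) below.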
+ ), +) def _cell_magic(line, query): """Underlying function for bigquery cell magic @@ -550,7 +389,8 @@ def _cell_magic(line, query): "Storage API is already used by default.", category=DeprecationWarning, ) - use_bqstorage_api = not args.use_rest_api + use_bqstorage_api = not args.use_rest_api and (bigquery_storage is not None) + location = args.location params = [] if params_option_value: @@ -579,6 +419,7 @@ def _cell_magic(line, query): default_query_job_config=context.default_query_job_config, client_info=client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT), client_options=bigquery_client_options, + location=location, ) if context._connection: client._connection = context._connection @@ -769,7 +610,9 @@ def _make_bqstorage_client(client, use_bqstorage_api, client_options): return None try: - _versions_helpers.BQ_STORAGE_VERSIONS.try_import(raise_if_error=True) + bigquery_magics._versions_helpers.BQ_STORAGE_VERSIONS.try_import( + raise_if_error=True + ) except exceptions.BigQueryStorageNotFoundError as err: customized_error = ImportError( "The default BigQuery Storage API client cannot be used, install " @@ -778,8 +621,6 @@ def _make_bqstorage_client(client, use_bqstorage_api, client_options): "the --use_rest_api magic option." ) raise customized_error from err - except exceptions.LegacyBigQueryStorageError: - pass try: from google.api_core.gapic_v1 import client_info as gapic_client_info diff --git a/bigquery_magics/config.py b/bigquery_magics/config.py new file mode 100644 index 0000000..7d354f0 --- /dev/null +++ b/bigquery_magics/config.py @@ -0,0 +1,187 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import google.api_core.client_options as client_options +import google.auth # type: ignore +import google.cloud.bigquery as bigquery + + +class Context(object): + """Storage for objects to be used throughout an IPython notebook session. + + A Context object is initialized when the ``bigquery_magics`` module is imported, + and can be found at ``bigquery_magics.context``. + """ + + def __init__(self): + self._credentials = None + self._project = None + self._connection = None + self._default_query_job_config = bigquery.QueryJobConfig() + self._bigquery_client_options = client_options.ClientOptions() + self._bqstorage_client_options = client_options.ClientOptions() + self._progress_bar_type = "tqdm_notebook" + + @property + def credentials(self): + """google.auth.credentials.Credentials: Credentials to use for queries + performed through IPython magics. + + Note: + These credentials do not need to be explicitly defined if you are + using Application Default Credentials. If you are not using + Application Default Credentials, manually construct a + :class:`google.auth.credentials.Credentials` object and set it as + the context credentials as demonstrated in the example below. See + `auth docs`_ for more information on obtaining credentials. 
+
+        Example:
+            Manually setting the context credentials:
+
+            >>> import bigquery_magics
+            >>> from google.oauth2 import service_account
+            >>> credentials = (service_account
+            ...     .Credentials.from_service_account_file(
+            ...         '/path/to/key.json'))
+            >>> bigquery_magics.context.credentials = credentials
+
+
+        .. _auth docs: http://google-auth.readthedocs.io
+            /en/latest/user-guide.html#obtaining-credentials
+        """
+        if self._credentials is None:
+            self._credentials, _ = google.auth.default()
+        return self._credentials
+
+    @credentials.setter
+    def credentials(self, value):
+        self._credentials = value
+
+    @property
+    def project(self):
+        """str: Default project to use for queries performed through IPython
+        magics.
+
+        Note:
+            The project does not need to be explicitly defined if you have an
+            environment default project set. If you do not have a default
+            project set in your environment, manually assign the project as
+            demonstrated in the example below.
+
+        Example:
+            Manually setting the context project:
+
+            >>> import bigquery_magics
+            >>> bigquery_magics.context.project = 'my-project'
+        """
+        if self._project is None:
+            _, self._project = google.auth.default()
+        return self._project
+
+    @project.setter
+    def project(self, value):
+        self._project = value
+
+    @property
+    def bigquery_client_options(self):
+        """google.api_core.client_options.ClientOptions: client options to be
+        used through IPython magics.
+
+        Note::
+            The client options do not need to be explicitly defined if no
+            special network connections are required. Normally you would be
+            using the https://bigquery.googleapis.com/ end point.
+
+        Example:
+            Manually setting the endpoint:
+
+            >>> import bigquery_magics
+            >>> client_options = {}
+            >>> client_options['api_endpoint'] = "https://some.special.url"
+            >>> bigquery_magics.context.bigquery_client_options = client_options
+        """
+        return self._bigquery_client_options
+
+    @bigquery_client_options.setter
+    def bigquery_client_options(self, value):
+        self._bigquery_client_options = value
+
+    @property
+    def bqstorage_client_options(self):
+        """google.api_core.client_options.ClientOptions: client options to be
+        used through IPython magics for the storage client.
+
+        Note::
+            The client options do not need to be explicitly defined if no
+            special network connections are required. Normally you would be
+            using the https://bigquerystorage.googleapis.com/ end point.
+
+        Example:
+            Manually setting the endpoint:
+
+            >>> import bigquery_magics
+            >>> client_options = {}
+            >>> client_options['api_endpoint'] = "https://some.special.url"
+            >>> bigquery_magics.context.bqstorage_client_options = client_options
+        """
+        return self._bqstorage_client_options
+
+    @bqstorage_client_options.setter
+    def bqstorage_client_options(self, value):
+        self._bqstorage_client_options = value
+
+    @property
+    def default_query_job_config(self):
+        """google.cloud.bigquery.job.QueryJobConfig: Default job
+        configuration for queries.
+
+        The context's :class:`~google.cloud.bigquery.job.QueryJobConfig` is
+        used for queries. Some properties can be overridden with arguments to
+        the magics.
+
+        Example:
+            Manually setting the default value for ``maximum_bytes_billed``
+            to 100 MB:
+
+            >>> import bigquery_magics
+            >>> bigquery_magics.context.default_query_job_config.maximum_bytes_billed = 100000000
+        """
+        return self._default_query_job_config
+
+    @default_query_job_config.setter
+    def default_query_job_config(self, value):
+        self._default_query_job_config = value
+
+    @property
+    def progress_bar_type(self):
+        """str: Default progress bar type to use to display progress bar while
+        executing queries through IPython magics.
+
+        Note::
+            Install the ``tqdm`` package to use this feature.
+
+        Example:
+            Manually setting the progress_bar_type:
+
+            >>> import bigquery_magics
+            >>> bigquery_magics.context.progress_bar_type = "tqdm_notebook"
+        """
+        return self._progress_bar_type
+
+    @progress_bar_type.setter
+    def progress_bar_type(self, value):
+        self._progress_bar_type = value
+
+
+context = Context()
diff --git a/bigquery_magics/line_arg_parser/__init__.py b/bigquery_magics/line_arg_parser/__init__.py
index 9471446..dfa9ec6 100644
--- a/bigquery_magics/line_arg_parser/__init__.py
+++ b/bigquery_magics/line_arg_parser/__init__.py
@@ -12,16 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from google.cloud.bigquery.magics.line_arg_parser.exceptions import ParseError
-from google.cloud.bigquery.magics.line_arg_parser.exceptions import (
+from bigquery_magics.line_arg_parser.exceptions import (
     DuplicateQueryParamsError,
+    ParseError,
     QueryParamsParseError,
 )
-from google.cloud.bigquery.magics.line_arg_parser.lexer import Lexer
-from google.cloud.bigquery.magics.line_arg_parser.lexer import TokenType
-from google.cloud.bigquery.magics.line_arg_parser.parser import Parser
-from google.cloud.bigquery.magics.line_arg_parser.visitors import QueryParamsExtractor
-
+from bigquery_magics.line_arg_parser.lexer import Lexer, TokenType
+from bigquery_magics.line_arg_parser.parser import Parser
+from bigquery_magics.line_arg_parser.visitors import QueryParamsExtractor
 
 __all__ = (
     "DuplicateQueryParamsError",
diff --git a/bigquery_magics/line_arg_parser/lexer.py b/bigquery_magics/line_arg_parser/lexer.py
index 71b287d..6e8b4cc 100644
--- a/bigquery_magics/line_arg_parser/lexer.py
+++ b/bigquery_magics/line_arg_parser/lexer.py
@@ -12,14 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from collections import namedtuple
-from collections import OrderedDict
+from collections import OrderedDict, namedtuple
+import enum
 import itertools
 import re
 
-import enum
-
-
 Token = namedtuple("Token", ("type_", "lexeme", "pos"))
 StateTransition = namedtuple("StateTransition", ("new_state", "total_offset"))
diff --git a/bigquery_magics/line_arg_parser/parser.py b/bigquery_magics/line_arg_parser/parser.py
index b9da20c..390b7bb 100644
--- a/bigquery_magics/line_arg_parser/parser.py
+++ b/bigquery_magics/line_arg_parser/parser.py
@@ -12,10 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
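+# The exceptions and lexer submodules are imported under short aliases so the
+# parser code below can reference, e.g., ``lap_exceptions.ParseError`` and
+# ``lap_lexer.TokenType`` rather than pulling individual names from the
+# package ``__init__``.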
-from google.cloud.bigquery.magics.line_arg_parser import DuplicateQueryParamsError
-from google.cloud.bigquery.magics.line_arg_parser import ParseError
-from google.cloud.bigquery.magics.line_arg_parser import QueryParamsParseError
-from google.cloud.bigquery.magics.line_arg_parser import TokenType
+import bigquery_magics.line_arg_parser.exceptions as lap_exceptions
+import bigquery_magics.line_arg_parser.lexer as lap_lexer
 
 
 class ParseNode(object):
@@ -154,11 +152,11 @@ def get_next_token(self):
         token = next(self._tokens_iter)
         self._current_token = token
 
-    def consume(self, expected_type, exc_type=ParseError):
+    def consume(self, expected_type, exc_type=lap_exceptions.ParseError):
         """Move to the next token in token stream if it matches the expected type.
 
         Args:
-            expected_type (lexer.TokenType): The expected token type to be consumed.
+            expected_type (bigquery_magics.line_arg_parser.lexer.TokenType): The expected token type to be consumed.
             exc_type (Optional[ParseError]):
                 The type of the exception to raise. Should be the ``ParseError``
                 class or one of its subclasses. Defaults to ``ParseError``.
@@ -167,10 +165,10 @@
             ParseError: If the current token does not match the expected type.
         """
         if self._current_token.type_ == expected_type:
-            if expected_type != TokenType.EOL:
+            if expected_type != lap_lexer.TokenType.EOL:
                 self.get_next_token()
         else:
-            if self._current_token.type_ == TokenType.EOL:
+            if self._current_token.type_ == lap_lexer.TokenType.EOL:
                 msg = "Unexpected end of input, expected {}.".format(expected_type)
             else:
                 msg = "Expected token type {}, but found {} at position {}.".format(
@@ -178,11 +176,11 @@
             )
             self.error(message=msg, exc_type=exc_type)
 
-    def error(self, message="Syntax error.", exc_type=ParseError):
+    def error(self, message="Syntax error.", exc_type=lap_exceptions.ParseError):
         """Raise an error with the given message.
 
         Args:
-            expected_type (lexer.TokenType): The expected token type to be consumed.
+            message (Optional[str]): The error message to include in the raised exception.
             exc_type (Optional[ParseError]):
                 The type of the exception to raise. Should be the ``ParseError``
                 class or one of its subclasses. Defaults to ``ParseError``.
@@ -204,7 +202,7 @@ def input_line(self): token = self._current_token - if token.type_ != TokenType.EOL: + if token.type_ != lap_lexer.TokenType.EOL: msg = "Unexpected input at position {}: {}".format(token.pos, token.lexeme) self.error(msg) @@ -219,10 +217,10 @@ def destination_var(self): """ token = self._current_token - if token.type_ == TokenType.DEST_VAR: - self.consume(TokenType.DEST_VAR) + if token.type_ == lap_lexer.TokenType.DEST_VAR: + self.consume(lap_lexer.TokenType.DEST_VAR) result = DestinationVar(token) - elif token.type_ == TokenType.UNKNOWN: + elif token.type_ == lap_lexer.TokenType.UNKNOWN: msg = "Unknown input at position {}: {}".format(token.pos, token.lexeme) self.error(msg) else: @@ -242,15 +240,15 @@ def option_list(self): all_options = [] def parse_nonparams_options(): - while self._current_token.type_ == TokenType.OPTION_SPEC: + while self._current_token.type_ == lap_lexer.TokenType.OPTION_SPEC: token = self._current_token - self.consume(TokenType.OPTION_SPEC) + self.consume(lap_lexer.TokenType.OPTION_SPEC) opt_name = token.lexeme[2:] # cut off the "--" prefix # skip the optional "=" character - if self._current_token.type_ == TokenType.OPTION_EQ: - self.consume(TokenType.OPTION_EQ) + if self._current_token.type_ == lap_lexer.TokenType.OPTION_EQ: + self.consume(lap_lexer.TokenType.OPTION_EQ) opt_value = self.option_value() option = CmdOption(opt_name, opt_value) @@ -260,15 +258,16 @@ def parse_nonparams_options(): token = self._current_token - if token.type_ == TokenType.PARAMS_OPT_SPEC: + if token.type_ == lap_lexer.TokenType.PARAMS_OPT_SPEC: option = self.params_option() all_options.append(option) parse_nonparams_options() - if self._current_token.type_ == TokenType.PARAMS_OPT_SPEC: + if self._current_token.type_ == lap_lexer.TokenType.PARAMS_OPT_SPEC: self.error( - message="Duplicate --params option", exc_type=DuplicateQueryParamsError + message="Duplicate --params option", + exc_type=lap_exceptions.DuplicateQueryParamsError, ) return CmdOptionList(all_options) @@ -282,10 +281,10 @@ def option_value(self): """ token = self._current_token - if token.type_ == TokenType.OPT_VAL: - self.consume(TokenType.OPT_VAL) + if token.type_ == lap_lexer.TokenType.OPT_VAL: + self.consume(lap_lexer.TokenType.OPT_VAL) result = CmdOptionValue(token) - elif token.type_ == TokenType.UNKNOWN: + elif token.type_ == lap_lexer.TokenType.UNKNOWN: msg = "Unknown input at position {}: {}".format(token.pos, token.lexeme) self.error(msg) else: @@ -301,19 +300,22 @@ def params_option(self): params_option : PARAMS_OPT_SPEC [PARAMS_OPT_EQ] \ (DOLLAR_PY_ID | PY_STRING | py_dict) """ - self.consume(TokenType.PARAMS_OPT_SPEC) + self.consume(lap_lexer.TokenType.PARAMS_OPT_SPEC) # skip the optional "=" character - if self._current_token.type_ == TokenType.PARAMS_OPT_EQ: - self.consume(TokenType.PARAMS_OPT_EQ) + if self._current_token.type_ == lap_lexer.TokenType.PARAMS_OPT_EQ: + self.consume(lap_lexer.TokenType.PARAMS_OPT_EQ) - if self._current_token.type_ == TokenType.DOLLAR_PY_ID: + if self._current_token.type_ == lap_lexer.TokenType.DOLLAR_PY_ID: token = self._current_token - self.consume(TokenType.DOLLAR_PY_ID) + self.consume(lap_lexer.TokenType.DOLLAR_PY_ID) opt_value = PyVarExpansion(token) - elif self._current_token.type_ == TokenType.PY_STRING: + elif self._current_token.type_ == lap_lexer.TokenType.PY_STRING: token = self._current_token - self.consume(TokenType.PY_STRING, exc_type=QueryParamsParseError) + self.consume( + lap_lexer.TokenType.PY_STRING, + 
exc_type=lap_exceptions.QueryParamsParseError, + ) opt_value = PyScalarValue(token, token.lexeme) else: opt_value = self.py_dict() @@ -329,9 +331,13 @@ def py_dict(self): py_dict : LCURL dict_items RCURL """ - self.consume(TokenType.LCURL, exc_type=QueryParamsParseError) + self.consume( + lap_lexer.TokenType.LCURL, exc_type=lap_exceptions.QueryParamsParseError + ) dict_items = self.dict_items() - self.consume(TokenType.RCURL, exc_type=QueryParamsParseError) + self.consume( + lap_lexer.TokenType.RCURL, exc_type=lap_exceptions.QueryParamsParseError + ) return PyDict(dict_items) @@ -348,8 +354,10 @@ def dict_items(self): if item is not None: result.append(item) - while self._current_token.type_ == TokenType.COMMA: - self.consume(TokenType.COMMA, exc_type=QueryParamsParseError) + while self._current_token.type_ == lap_lexer.TokenType.COMMA: + self.consume( + lap_lexer.TokenType.COMMA, exc_type=lap_exceptions.QueryParamsParseError + ) item = self.dict_item() if item is not None: result.append(item) @@ -365,14 +373,16 @@ def dict_item(self): """ token = self._current_token - if token.type_ == TokenType.PY_STRING: + if token.type_ == lap_lexer.TokenType.PY_STRING: key = self.dict_key() - self.consume(TokenType.COLON, exc_type=QueryParamsParseError) + self.consume( + lap_lexer.TokenType.COLON, exc_type=lap_exceptions.QueryParamsParseError + ) value = self.py_value() result = PyDictItem(key, value) - elif token.type_ == TokenType.UNKNOWN: + elif token.type_ == lap_lexer.TokenType.UNKNOWN: msg = "Unknown input at position {}: {}".format(token.pos, token.lexeme) - self.error(msg, exc_type=QueryParamsParseError) + self.error(msg, exc_type=lap_exceptions.QueryParamsParseError) else: result = None @@ -386,7 +396,9 @@ def dict_key(self): dict_key : PY_STRING """ token = self._current_token - self.consume(TokenType.PY_STRING, exc_type=QueryParamsParseError) + self.consume( + lap_lexer.TokenType.PY_STRING, exc_type=lap_exceptions.QueryParamsParseError + ) return PyDictKey(token) def py_value(self): @@ -398,29 +410,38 @@ def py_value(self): """ token = self._current_token - if token.type_ == TokenType.PY_BOOL: - self.consume(TokenType.PY_BOOL, exc_type=QueryParamsParseError) + if token.type_ == lap_lexer.TokenType.PY_BOOL: + self.consume( + lap_lexer.TokenType.PY_BOOL, + exc_type=lap_exceptions.QueryParamsParseError, + ) return PyScalarValue(token, token.lexeme) - elif token.type_ == TokenType.PY_NUMBER: - self.consume(TokenType.PY_NUMBER, exc_type=QueryParamsParseError) + elif token.type_ == lap_lexer.TokenType.PY_NUMBER: + self.consume( + lap_lexer.TokenType.PY_NUMBER, + exc_type=lap_exceptions.QueryParamsParseError, + ) return PyScalarValue(token, token.lexeme) - elif token.type_ == TokenType.PY_STRING: - self.consume(TokenType.PY_STRING, exc_type=QueryParamsParseError) + elif token.type_ == lap_lexer.TokenType.PY_STRING: + self.consume( + lap_lexer.TokenType.PY_STRING, + exc_type=lap_exceptions.QueryParamsParseError, + ) return PyScalarValue(token, token.lexeme) - elif token.type_ == TokenType.LPAREN: + elif token.type_ == lap_lexer.TokenType.LPAREN: tuple_node = self.py_tuple() return tuple_node - elif token.type_ == TokenType.LSQUARE: + elif token.type_ == lap_lexer.TokenType.LSQUARE: list_node = self.py_list() return list_node - elif token.type_ == TokenType.LCURL: + elif token.type_ == lap_lexer.TokenType.LCURL: dict_node = self.py_dict() return dict_node else: msg = "Unexpected token type {} at position {}.".format( token.type_, token.pos ) - self.error(msg, exc_type=QueryParamsParseError) + 
self.error(msg, exc_type=lap_exceptions.QueryParamsParseError) def py_tuple(self): """Implementation of the ``py_tuple`` grammar production rule. @@ -429,9 +450,13 @@ def py_tuple(self): py_tuple : LPAREN collection_items RPAREN """ - self.consume(TokenType.LPAREN, exc_type=QueryParamsParseError) + self.consume( + lap_lexer.TokenType.LPAREN, exc_type=lap_exceptions.QueryParamsParseError + ) items = self.collection_items() - self.consume(TokenType.RPAREN, exc_type=QueryParamsParseError) + self.consume( + lap_lexer.TokenType.RPAREN, exc_type=lap_exceptions.QueryParamsParseError + ) return PyTuple(items) @@ -442,9 +467,13 @@ def py_list(self): py_list : LSQUARE collection_items RSQUARE """ - self.consume(TokenType.LSQUARE, exc_type=QueryParamsParseError) + self.consume( + lap_lexer.TokenType.LSQUARE, exc_type=lap_exceptions.QueryParamsParseError + ) items = self.collection_items() - self.consume(TokenType.RSQUARE, exc_type=QueryParamsParseError) + self.consume( + lap_lexer.TokenType.RSQUARE, exc_type=lap_exceptions.QueryParamsParseError + ) return PyList(items) @@ -461,8 +490,10 @@ def collection_items(self): if item is not None: result.append(item) - while self._current_token.type_ == TokenType.COMMA: - self.consume(TokenType.COMMA, exc_type=QueryParamsParseError) + while self._current_token.type_ == lap_lexer.TokenType.COMMA: + self.consume( + lap_lexer.TokenType.COMMA, exc_type=lap_exceptions.QueryParamsParseError + ) item = self.collection_item() if item is not None: result.append(item) @@ -476,7 +507,10 @@ def collection_item(self): collection_item : py_value | EMPTY """ - if self._current_token.type_ not in {TokenType.RPAREN, TokenType.RSQUARE}: + if self._current_token.type_ not in { + lap_lexer.TokenType.RPAREN, + lap_lexer.TokenType.RSQUARE, + }: result = self.py_value() else: result = None # end of list/tuple items diff --git a/bigquery_magics/version.py b/bigquery_magics/version.py index 72b0b02..4027bb8 100644 --- a/bigquery_magics/version.py +++ b/bigquery_magics/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "0.0.1" \ No newline at end of file +__version__ = "0.0.1" diff --git a/docs/conf.py b/docs/conf.py index d0468e2..073edc7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -24,9 +24,9 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the diff --git a/noxfile.py b/noxfile.py index 08ee214..f5dd689 100644 --- a/noxfile.py +++ b/noxfile.py @@ -42,17 +42,32 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "google-cloud-testutils", +] UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [ - "tqdm", -] +UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = { - "3.8": [], + "3.7": [ + "bqstorage", + ], + "3.8": [ + "bqstorage", + ], + "3.9": [ + "bqstorage", + ], + "3.10": [ + "bqstorage", + ], + "3.11": [], + "3.12": [ + "bqstorage", + ], } -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.12"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.11", "3.12"] SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", @@ -61,10 +76,25 @@ SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [ - "tqdm", -] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = { + "3.7": [ + "bqstorage", + ], + "3.8": [ + "bqstorage", + ], + "3.9": [ + "bqstorage", + ], + "3.10": [ + "bqstorage", + ], + "3.11": [], + "3.12": [ + "bqstorage", + ], +} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() diff --git a/owlbot.py b/owlbot.py index c2de310..ca0d20e 100644 --- a/owlbot.py +++ b/owlbot.py @@ -1,4 +1,4 @@ -# Copyright 2018 Google LLC +# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,116 +13,82 @@ # limitations under the License. """This script is used to synthesize generated parts of this library.""" -from pathlib import Path -import textwrap + +import pathlib import synthtool as s from synthtool import gcp from synthtool.languages import python -REPO_ROOT = Path(__file__).parent.absolute() - -default_version = "v2" - -for library in s.get_staging_dirs(default_version): - # Avoid breaking change due to change in field renames. - # https://github.com/googleapis/python-bigquery/issues/319 - s.replace( - library / f"google/cloud/bigquery_{library.name}/types/standard_sql.py", - r"type_ ", - "type ", - ) - # Patch docs issue - s.replace( - library / f"google/cloud/bigquery_{library.name}/types/model.py", - r"""\"predicted_\"""", - """`predicted_`""", - ) - s.move(library / f"google/cloud/bigquery_{library.name}/types") -s.remove_staging_dirs() +REPO_ROOT = pathlib.Path(__file__).parent.absolute() common = gcp.CommonTemplates() # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- + +extras = ["bqstorage"] +extras_by_python = { + "3.7": extras, + "3.8": extras, + "3.9": extras, + "3.10": extras, + # Use a middle version of Python to test when no extras are installed. 
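+    # Installing the bqstorage extra on the remaining versions keeps both the
+    # REST and BigQuery Storage download paths covered by the test matrix.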
+ "3.11": [], + "3.12": extras, +} templated_files = common.py_library( + unit_test_python_versions=["3.7", "3.8", "3.11", "3.12"], + system_test_python_versions=["3.8", "3.11", "3.12"], cov_level=100, - samples=True, - microgenerator=True, - split_system_tests=True, + unit_test_extras_by_python=extras_by_python, + unit_test_external_dependencies=["google-cloud-testutils"], + system_test_extras_by_python=extras_by_python, intersphinx_dependencies={ - "dateutil": "https://dateutil.readthedocs.io/en/latest/", - "geopandas": "https://geopandas.org/", "pandas": "https://pandas.pydata.org/pandas-docs/stable/", + "pydata-google-auth": "https://pydata-google-auth.readthedocs.io/en/latest/", }, ) - -# BigQuery has a custom multiprocessing note s.move( templated_files, excludes=[ - "noxfile.py", + # Multi-processing note isn't relevant, as bigquery-magics is responsible for + # creating clients, not the end user. "docs/multiprocessing.rst", - "docs/index.rst", - ".coveragerc", - ".github/CODEOWNERS", - # Include custom SNIPPETS_TESTS job for performance. - # https://github.com/googleapis/python-bigquery/issues/191 - ".kokoro/presubmit/presubmit.cfg", - ".github/workflows", # exclude gh actions as credentials are needed for tests - "README.rst", + "README.rst", ], ) -python.configure_previous_major_version_branches() # ---------------------------------------------------------------------------- -# Samples templates +# Fixup files # ---------------------------------------------------------------------------- -python.py_samples() +s.replace( + ["noxfile.py"], r"[\"']google[\"']", '"bigquery_magics"', +) + s.replace( - "docs/conf.py", - r'\{"members": True\}', - '{"members": True, "inherited-members": True}', + ["noxfile.py"], "--cov=google", "--cov=bigquery_magics", ) + + +# Workaround for https://github.com/googleapis/synthtool/issues/1317 s.replace( - "docs/conf.py", - r"exclude_patterns = \[", - '\\g<0>\n "google/cloud/bigquery_v2/**", # Legacy proto-based types.', + ["noxfile.py"], r'extras = "\[\]"', 'extras = ""', ) # ---------------------------------------------------------------------------- -# pytype-related changes +# Samples templates # ---------------------------------------------------------------------------- -# Add .pytype to .gitignore -s.replace(".gitignore", r"\.pytest_cache", "\\g<0>\n.pytype") +python.py_samples(skip_readmes=True) -# Add pytype config to setup.cfg -s.replace( - "setup.cfg", - r"universal = 1", - textwrap.dedent( - """ \\g<0> - - [pytype] - python_version = 3.8 - inputs = - google/cloud/ - exclude = - tests/ - google/cloud/bigquery_v2/ # Legacy proto-based types. - output = .pytype/ - disable = - # There's some issue with finding some pyi files, thus disabling. - # The issue https://github.com/google/pytype/issues/150 is closed, but the - # error still occurs for some reason. 
- pyi-error""" - ), -) +# ---------------------------------------------------------------------------- +# Final cleanup +# ---------------------------------------------------------------------------- -s.shell.run(["nox", "-s", "blacken"], hide_output=False) +s.shell.run(["nox", "-s", "format"], hide_output=False) for noxfile in REPO_ROOT.glob("samples/**/noxfile.py"): - s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) + s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) \ No newline at end of file diff --git a/setup.py b/setup.py index 207976d..d96d519 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,6 @@ import setuptools - # Package metadata. name = "bigquery-magics" @@ -30,14 +29,31 @@ release_status = "Development Status :: 4 - Beta" dependencies = [ "db-dtypes>=0.3.0,<2.0.0dev", - "google-cloud-bigquery >= 3.0.0, <4.0.0dev", + "google-cloud-bigquery >= 3.13.0, <4.0.0dev", "ipywidgets>=7.7.1", "ipython>=7.23.1", "ipykernel>=6.0.0", + "packaging >= 20.0.0", "pandas>=1.1.0", "pyarrow >= 3.0.0", + "tqdm >= 4.7.4, <5.0.0dev", ] -extras = {} +extras = { + # bqstorage had a period where it was a required dependency, and has been + # moved back to optional due to bloat. See + # https://github.com/googleapis/python-bigquery/issues/1196 for more background. + "bqstorage": [ + "google-cloud-bigquery-storage >= 2.6.0, <3.0.0dev", + # Due to an issue in pip's dependency resolver, the `grpc` extra is not + # installed, even though `google-cloud-bigquery-storage` specifies it + # as `google-api-core[grpc]`. We thus need to explicitly specify it here. + # See: https://github.com/googleapis/python-bigquery/issues/83 The + # grpc.Channel.close() method isn't added until 1.32.0. + # https://github.com/grpc/grpc/pull/15254 + "grpcio >= 1.47.0, < 2.0dev", + "grpcio >= 1.49.1, < 2.0dev; python_version>='3.11'", + ], +} all_extras = [] diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index bd915d1..289ccec 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -6,9 +6,11 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 db-dtypes==0.3.0 -google-cloud-bigquery==3.0.0 +google-cloud-bigquery==3.13.0 +google-cloud-bigquery-storage==2.6.0 ipywidgets==7.7.1 ipython==7.23.1 ipykernel==6.0.0 pandas==1.1.0 pyarrow==3.0.0 +tqdm==4.7.4 \ No newline at end of file diff --git a/tests/system/test_bigquery.py b/tests/system/test_bigquery.py index 3d761cd..d7147eb 100644 --- a/tests/system/test_bigquery.py +++ b/tests/system/test_bigquery.py @@ -16,9 +16,8 @@ import re -import pytest import psutil - +import pytest IPython = pytest.importorskip("IPython") io = pytest.importorskip("IPython.utils.io") @@ -50,7 +49,7 @@ def test_bigquery_magic(ipython_interactive): current_process = psutil.Process() conn_count_start = len(current_process.connections()) - ip.extension_manager.load_extension("google.cloud.bigquery") + ip.extension_manager.load_extension("bigquery_magics") sql = """ SELECT CONCAT( diff --git a/tests/unit/line_arg_parser/test_parser.py b/tests/unit/line_arg_parser/test_parser.py index b170d53..2d6c62f 100644 --- a/tests/unit/line_arg_parser/test_parser.py +++ b/tests/unit/line_arg_parser/test_parser.py @@ -36,8 +36,7 @@ def test_consume_expected_eol(parser_class): def test_consume_unexpected_eol(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError - from google.cloud.bigquery.magics.line_arg_parser import TokenType + from 
google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType from google.cloud.bigquery.magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. @@ -49,8 +48,7 @@ def test_consume_unexpected_eol(parser_class): def test_input_line_unexpected_input(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError - from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType from google.cloud.bigquery.magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. @@ -66,8 +64,7 @@ def test_input_line_unexpected_input(parser_class): def test_destination_var_unexpected_input(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError - from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType from google.cloud.bigquery.magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. @@ -82,8 +79,7 @@ def test_destination_var_unexpected_input(parser_class): def test_option_value_unexpected_input(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError - from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType from google.cloud.bigquery.magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. @@ -133,8 +129,7 @@ def test_dict_items_trailing_comma(parser_class): def test_dict_item_unknown_input(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError - from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType from google.cloud.bigquery.magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. @@ -148,8 +143,7 @@ def test_dict_item_unknown_input(parser_class): def test_pyvalue_list_containing_dict(parser_class): from google.cloud.bigquery.magics.line_arg_parser import TokenType from google.cloud.bigquery.magics.line_arg_parser.lexer import Token - from google.cloud.bigquery.magics.line_arg_parser.parser import PyDict - from google.cloud.bigquery.magics.line_arg_parser.parser import PyList + from google.cloud.bigquery.magics.line_arg_parser.parser import PyDict, PyList # A simple iterable of Tokens is sufficient. fake_lexer = [ @@ -180,8 +174,7 @@ def test_pyvalue_list_containing_dict(parser_class): def test_pyvalue_invalid_token(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError - from google.cloud.bigquery.magics.line_arg_parser import TokenType + from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType from google.cloud.bigquery.magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. diff --git a/tests/unit/test__versions_helpers.py b/tests/unit/test__versions_helpers.py new file mode 100644 index 0000000..80a690e --- /dev/null +++ b/tests/unit/test__versions_helpers.py @@ -0,0 +1,108 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from unittest import mock
+
+import pytest
+
+try:
+    from google.cloud import bigquery_storage  # type: ignore
+except ImportError:
+    bigquery_storage = None
+
+from google.cloud.bigquery import exceptions
+
+from bigquery_magics import _versions_helpers
+
+
+@pytest.mark.skipif(
+    bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
+def test_raises_no_error_w_recent_bqstorage():
+    with mock.patch("google.cloud.bigquery_storage.__version__", new="2.0.0"):
+        try:
+            bqstorage_versions = _versions_helpers.BQStorageVersions()
+            bqstorage_versions.try_import(raise_if_error=True)
+        except exceptions.LegacyBigQueryStorageError:  # pragma: NO COVER
+            pytest.fail("Legacy error raised with a non-legacy dependency version.")
+
+
+@pytest.mark.skipif(
+    bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
+def test_raises_error_w_legacy_bqstorage():
+    with mock.patch("google.cloud.bigquery_storage.__version__", new="1.9.9"):
+        with pytest.raises(exceptions.LegacyBigQueryStorageError):
+            bqstorage_versions = _versions_helpers.BQStorageVersions()
+            bqstorage_versions.try_import(raise_if_error=True)
+
+
+@pytest.mark.skipif(
+    bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
+def test_returns_none_with_legacy_bqstorage():
+    with mock.patch("google.cloud.bigquery_storage.__version__", new="1.9.9"):
+        try:
+            bqstorage_versions = _versions_helpers.BQStorageVersions()
+            bq_storage = bqstorage_versions.try_import()
+        except exceptions.LegacyBigQueryStorageError:  # pragma: NO COVER
+            pytest.fail("Legacy error raised when raise_if_error == False.")
+        assert bq_storage is None
+
+
+@pytest.mark.skipif(
+    bigquery_storage is not None,
+    reason="Tests behavior when `google-cloud-bigquery-storage` isn't installed",
+)
+def test_returns_none_with_bqstorage_uninstalled():
+    try:
+        bqstorage_versions = _versions_helpers.BQStorageVersions()
+        bq_storage = bqstorage_versions.try_import()
+    except exceptions.LegacyBigQueryStorageError:  # pragma: NO COVER
+        pytest.fail("NotFound error raised when raise_if_error == False.")
+    assert bq_storage is None
+
+
+@pytest.mark.skipif(
+    bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
+def test_raises_error_w_unknown_bqstorage_version():
+    with mock.patch("google.cloud.bigquery_storage", autospec=True) as fake_module:
+        del fake_module.__version__
+        error_pattern = r"version found: 0.0.0"
+        with pytest.raises(exceptions.LegacyBigQueryStorageError, match=error_pattern):
+            bqstorage_versions = _versions_helpers.BQStorageVersions()
+            bqstorage_versions.try_import(raise_if_error=True)
+
+
+@pytest.mark.skipif(
+    bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
+def test_installed_bqstorage_version_returns_cached():
+    bqstorage_versions = _versions_helpers.BQStorageVersions()
+    bqstorage_versions._installed_version = object()
+    assert bqstorage_versions.installed_version is bqstorage_versions._installed_version
+
+
+@pytest.mark.skipif(
+    bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
+def test_installed_bqstorage_version_returns_parsed_version():
+    bqstorage_versions = _versions_helpers.BQStorageVersions()
+    with mock.patch("google.cloud.bigquery_storage.__version__", new="1.2.3"):
+        installed_version = bqstorage_versions.installed_version
+
+    assert installed_version.major == 1
+    assert installed_version.minor == 2
+    assert installed_version.micro == 3
diff --git a/tests/unit/test_bigquery.py b/tests/unit/test_bigquery.py
index 4b1aaf1..678aa56 100644
--- a/tests/unit/test_bigquery.py
+++ b/tests/unit/test_bigquery.py
@@ -12,36 +12,44 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from concurrent import futures
+import contextlib
 import copy
 import re
-from concurrent import futures
 from unittest import mock
 import warnings
 
+import IPython
+import IPython.terminal.interactiveshell as interactiveshell
+import IPython.testing.tools as tools
+import IPython.utils.io as io
 from google.api_core import exceptions
 import google.auth.credentials
-import pytest
-from tests.unit.helpers import make_connection
-from test_utils.imports import maybe_fail_import
-
 from google.cloud import bigquery
 from google.cloud.bigquery import exceptions as bq_exceptions
-from google.cloud.bigquery import job
-from google.cloud.bigquery import table
+from google.cloud.bigquery import job, table
+import google.cloud.bigquery._http
+import google.cloud.bigquery.exceptions
 from google.cloud.bigquery.retry import DEFAULT_TIMEOUT
+import pandas
+import pytest
+import test_utils.imports  # google-cloud-testutils
+
+import bigquery_magics
+import bigquery_magics.bigquery as magics
 
 try:
-    from google.cloud.bigquery.magics import magics
+    import google.cloud.bigquery_storage as bigquery_storage
 except ImportError:
-    magics = None
+    bigquery_storage = None
+
 
-bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
-IPython = pytest.importorskip("IPython")
-interactiveshell = pytest.importorskip("IPython.terminal.interactiveshell")
-tools = pytest.importorskip("IPython.testing.tools")
-io = pytest.importorskip("IPython.utils.io")
-pandas = pytest.importorskip("pandas")
+def make_connection(*args):
+    # TODO(tswast): Remove this in favor of a mock google.cloud.bigquery.Client
+    # in tests.
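+    # Each positional argument supplies one canned API response: setting
+    # ``side_effect`` to the tuple makes successive ``api_request`` calls
+    # return the payloads in order.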
+ conn = mock.create_autospec(google.cloud.bigquery._http.Connection, instance=True) + conn.api_request.side_effect = args + return conn @pytest.fixture(scope="session") @@ -87,7 +95,7 @@ def fail_if(name, globals, locals, fromlist, level): fromlist is not None and "bigquery_storage" in fromlist ) - return maybe_fail_import(predicate=fail_if) + return test_utils.imports.maybe_fail_import(predicate=fail_if) @pytest.fixture(scope="session") @@ -98,7 +106,7 @@ def fail_if(name, globals, locals, fromlist, level): # NOTE: *very* simplified, assuming a straightforward absolute import return "gapic_v1" in name or (fromlist is not None and "gapic_v1" in fromlist) - return maybe_fail_import(predicate=fail_if) + return test_utils.imports.maybe_fail_import(predicate=fail_if) PROJECT_ID = "its-a-project-eh" @@ -136,8 +144,8 @@ def test_context_with_default_credentials(): """When Application Default Credentials are set, the context credentials will be created the first time it is called """ - assert magics.context._credentials is None - assert magics.context._project is None + assert bigquery_magics.context._credentials is None + assert bigquery_magics.context._project is None project = "prahj-ekt" credentials_mock = mock.create_autospec( @@ -147,20 +155,19 @@ def test_context_with_default_credentials(): "google.auth.default", return_value=(credentials_mock, project) ) with default_patch as default_mock: - assert magics.context.credentials is credentials_mock - assert magics.context.project == project + assert bigquery_magics.context.credentials is credentials_mock + assert bigquery_magics.context.project == project assert default_mock.call_count == 2 @pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_context_with_default_connection(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._credentials = None - magics.context._project = None - magics.context._connection = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._credentials = None + bigquery_magics.context._project = None + bigquery_magics.context._connection = None default_credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -193,6 +200,7 @@ def test_context_with_default_connection(): path=f"/projects/{PROJECT_ID}/queries/{JOB_ID}", query_params=mock.ANY, timeout=mock.ANY, + headers=mock.ANY, ) default_conn.api_request.assert_has_calls([begin_call, query_results_call]) @@ -207,23 +215,22 @@ def test_context_credentials_and_project_can_be_set_explicitly(): "google.auth.default", return_value=(credentials_mock, project1) ) with default_patch as default_mock: - magics.context.credentials = credentials_mock - magics.context.project = project2 + bigquery_magics.context.credentials = credentials_mock + bigquery_magics.context.project = project2 - assert magics.context.project == project2 - assert magics.context.credentials is credentials_mock + assert bigquery_magics.context.project == project2 + assert bigquery_magics.context.credentials is credentials_mock # default should not be called if credentials & project are explicitly set assert default_mock.call_count == 0 @pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_context_with_custom_connection(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None - 
magics.context._credentials = None - context_conn = magics.context._connection = make_connection( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None + bigquery_magics.context._credentials = None + context_conn = bigquery_magics.context._connection = make_connection( QUERY_RESOURCE, QUERY_RESULTS_RESOURCE ) @@ -257,12 +264,13 @@ def test_context_with_custom_connection(): path=f"/projects/{PROJECT_ID}/queries/{JOB_ID}", query_params=mock.ANY, timeout=mock.ANY, + headers=mock.ANY, ) context_conn.api_request.assert_has_calls([begin_call, query_results_call]) def test__run_query(): - magics.context._credentials = None + bigquery_magics.context._credentials = None job_id = "job_1234" sql = "SELECT 17" @@ -272,9 +280,7 @@ def test__run_query(): [table.Row((17,), {"num": 0})], ] - client_patch = mock.patch( - "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True - ) + client_patch = mock.patch("bigquery_magics.bigquery.bigquery.Client", autospec=True) with client_patch as client_mock, io.capture_output() as captured: client_mock().query(sql).result.side_effect = responses client_mock().query(sql).job_id = job_id @@ -295,13 +301,11 @@ def test__run_query(): def test__run_query_dry_run_without_errors_is_silent(): - magics.context._credentials = None + bigquery_magics.context._credentials = None sql = "SELECT 17" - client_patch = mock.patch( - "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True - ) + client_patch = mock.patch("bigquery_magics.bigquery.bigquery.Client", autospec=True) job_config = job.QueryJobConfig() job_config.dry_run = True @@ -372,28 +376,20 @@ def test__make_bqstorage_client_true_obsolete_dependency(): ) patcher = mock.patch( - "google.cloud.bigquery._versions_helpers.BQ_STORAGE_VERSIONS.try_import", + "bigquery_magics._versions_helpers.BQ_STORAGE_VERSIONS.try_import", side_effect=bq_exceptions.LegacyBigQueryStorageError( "google-cloud-bigquery-storage is outdated" ), ) - with patcher, warnings.catch_warnings(record=True) as warned: - got = magics._make_bqstorage_client(test_client, True, {}) - - assert got is None - - matching_warnings = [ - warning - for warning in warned - if "google-cloud-bigquery-storage is outdated" in str(warning) - ] - assert matching_warnings, "Obsolete dependency warning not raised." 
+ with patcher, pytest.raises( + google.cloud.bigquery.exceptions.LegacyBigQueryStorageError + ): + magics._make_bqstorage_client(test_client, True, {}) @pytest.mark.skipif( bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`" ) -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test__make_bqstorage_client_true_missing_gapic(missing_grpcio_lib): credentials_mock = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -410,9 +406,7 @@ def test__create_dataset_if_necessary_exists(): dataset_id = "dataset_id" dataset_reference = bigquery.dataset.DatasetReference(project, dataset_id) dataset = bigquery.Dataset(dataset_reference) - client_patch = mock.patch( - "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True - ) + client_patch = mock.patch("bigquery_magics.bigquery.bigquery.Client", autospec=True) with client_patch as client_mock: client = client_mock() client.project = project @@ -424,9 +418,7 @@ def test__create_dataset_if_necessary_exists(): def test__create_dataset_if_necessary_not_exist(): project = "project_id" dataset_id = "dataset_id" - client_patch = mock.patch( - "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True - ) + client_patch = mock.patch("bigquery_magics.bigquery.bigquery.Client", autospec=True) with client_patch as client_mock: client = client_mock() client.location = "us" @@ -439,28 +431,27 @@ def test__create_dataset_if_necessary_not_exist(): @pytest.mark.usefixtures("ipython_interactive") def test_extension_load(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") + ip.extension_manager.load_extension("bigquery_magics") # verify that the magic is registered and has the correct source magic = ip.magics_manager.magics["cell"].get("bigquery") - assert magic.__module__ == "google.cloud.bigquery.magics.magics" + assert magic.__module__ == "bigquery_magics.bigquery" @pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") @pytest.mark.skipif( bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`" ) def test_bigquery_magic_without_optional_arguments(monkeypatch): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") + ip.extension_manager.load_extension("bigquery_magics") mock_credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) # Set up the context with monkeypatch so that it's reset for subsequent # tests. - monkeypatch.setattr(magics.context, "_credentials", mock_credentials) + monkeypatch.setattr(bigquery_magics.context, "_credentials", mock_credentials) # Mock out the BigQuery Storage API. 
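     # (create_autospec mirrors BigQueryReadClient's real method signatures,
     # so any call that would not exist on the real client fails the test.)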
bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -475,9 +466,7 @@ def test_bigquery_magic_without_optional_arguments(monkeypatch): sql = "SELECT 17 AS num" result = pandas.DataFrame([17], columns=["num"]) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) query_job_mock = mock.create_autospec( google.cloud.bigquery.job.QueryJob, instance=True ) @@ -496,8 +485,8 @@ def test_bigquery_magic_without_optional_arguments(monkeypatch): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_default_connection_user_agent(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._connection = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._connection = None credentials_mock = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -505,9 +494,7 @@ def test_bigquery_magic_default_connection_user_agent(): default_patch = mock.patch( "google.auth.default", return_value=(credentials_mock, "general-project") ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) conn_patch = mock.patch("google.cloud.bigquery.client.Connection", autospec=True) with conn_patch as conn, run_query_patch, default_patch: @@ -521,14 +508,12 @@ def test_bigquery_magic_default_connection_user_agent(): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_with_legacy_sql(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) with run_query_patch as run_query_mock: ip.run_cell_magic("bigquery", "--use_legacy_sql", "SELECT 17 AS num") @@ -537,11 +522,10 @@ def test_bigquery_magic_with_legacy_sql(): @pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_bigquery_magic_with_result_saved_to_variable(ipython_ns_cleanup): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) @@ -551,9 +535,7 @@ def test_bigquery_magic_with_result_saved_to_variable(ipython_ns_cleanup): result = pandas.DataFrame([17], columns=["num"]) assert "df" not in ip.user_ns - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) query_job_mock = mock.create_autospec( google.cloud.bigquery.job.QueryJob, instance=True ) @@ -573,18 +555,16 @@ def test_bigquery_magic_with_result_saved_to_variable(ipython_ns_cleanup): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_does_not_clear_display_in_verbose_mode(): ip = 
IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) clear_patch = mock.patch( - "google.cloud.bigquery.magics.magics.display.clear_output", + "bigquery_magics.bigquery.display.clear_output", autospec=True, ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) with clear_patch as clear_mock, run_query_patch: ip.run_cell_magic("bigquery", "--verbose", "SELECT 17 as num") @@ -594,18 +574,16 @@ def test_bigquery_magic_does_not_clear_display_in_verbose_mode(): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_clears_display_in_non_verbose_mode(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) clear_patch = mock.patch( - "google.cloud.bigquery.magics.magics.display.clear_output", + "bigquery_magics.bigquery.display.clear_output", autospec=True, ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) with clear_patch as clear_mock, run_query_patch: ip.run_cell_magic("bigquery", "", "SELECT 17 as num") @@ -618,14 +596,14 @@ def test_bigquery_magic_clears_display_in_non_verbose_mode(): ) def test_bigquery_magic_with_bqstorage_from_argument(monkeypatch): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") + ip.extension_manager.load_extension("bigquery_magics") mock_credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) # Set up the context with monkeypatch so that it's reset for subsequent # tests. - monkeypatch.setattr(magics.context, "_credentials", mock_credentials) + monkeypatch.setattr(bigquery_magics.context, "_credentials", mock_credentials) # Mock out the BigQuery Storage API. bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -640,9 +618,7 @@ def test_bigquery_magic_with_bqstorage_from_argument(monkeypatch): sql = "SELECT 17 AS num" result = pandas.DataFrame([17], columns=["num"]) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) query_job_mock = mock.create_autospec( google.cloud.bigquery.job.QueryJob, instance=True ) @@ -686,14 +662,14 @@ def test_bigquery_magic_with_rest_client_requested(monkeypatch): pandas = pytest.importorskip("pandas") ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") + ip.extension_manager.load_extension("bigquery_magics") mock_credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) # Set up the context with monkeypatch so that it's reset for subsequent # tests. - monkeypatch.setattr(magics.context, "_credentials", mock_credentials) + monkeypatch.setattr(bigquery_magics.context, "_credentials", mock_credentials) # Mock out the BigQuery Storage API. 
bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -703,9 +679,7 @@ def test_bigquery_magic_with_rest_client_requested(monkeypatch): sql = "SELECT 17 AS num" result = pandas.DataFrame([17], columns=["num"]) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) query_job_mock = mock.create_autospec( google.cloud.bigquery.job.QueryJob, instance=True ) @@ -728,8 +702,8 @@ def test_bigquery_magic_with_rest_client_requested(monkeypatch): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_w_max_results_invalid(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None credentials_mock = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -750,8 +724,8 @@ def test_bigquery_magic_w_max_results_invalid(): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_w_max_results_valid_calls_queryjob_result(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None credentials_mock = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -784,8 +758,8 @@ def test_bigquery_magic_w_max_results_valid_calls_queryjob_result(): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_w_max_results_query_job_results_fails(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None credentials_mock = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -797,7 +771,7 @@ def test_bigquery_magic_w_max_results_query_job_results_fails(): "google.cloud.bigquery.client.Client.query", autospec=True ) close_transports_patch = mock.patch( - "google.cloud.bigquery.magics.magics._close_transports", + "bigquery_magics.bigquery._close_transports", autospec=True, ) @@ -821,8 +795,8 @@ def test_bigquery_magic_w_max_results_query_job_results_fails(): def test_bigquery_magic_w_table_id_invalid(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None credentials_mock = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -832,7 +806,7 @@ def test_bigquery_magic_w_table_id_invalid(): ) list_rows_patch = mock.patch( - "google.cloud.bigquery.magics.magics.bigquery.Client.list_rows", + "bigquery_magics.bigquery.bigquery.Client.list_rows", autospec=True, side_effect=exceptions.BadRequest("Not a valid table ID"), ) @@ -850,8 +824,8 @@ def test_bigquery_magic_w_table_id_invalid(): def test_bigquery_magic_w_missing_query(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None credentials_mock = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -872,11 +846,10 @@ def 
test_bigquery_magic_w_missing_query(): @pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_bigquery_magic_w_table_id_and_destination_var(ipython_ns_cleanup): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None ipython_ns_cleanup.append((ip, "df")) @@ -891,9 +864,7 @@ def test_bigquery_magic_w_table_id_and_destination_var(ipython_ns_cleanup): google.cloud.bigquery.table.RowIterator, instance=True ) - client_patch = mock.patch( - "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True - ) + client_patch = mock.patch("bigquery_magics.bigquery.bigquery.Client", autospec=True) table_id = "bigquery-public-data.samples.shakespeare" result = pandas.DataFrame([17], columns=["num"]) @@ -914,11 +885,10 @@ def test_bigquery_magic_w_table_id_and_destination_var(ipython_ns_cleanup): @pytest.mark.skipif( bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`" ) -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_bigquery_magic_w_table_id_and_bqstorage_client(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None credentials_mock = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -931,9 +901,7 @@ def test_bigquery_magic_w_table_id_and_bqstorage_client(): google.cloud.bigquery.table.RowIterator, instance=True ) - client_patch = mock.patch( - "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True - ) + client_patch = mock.patch("bigquery_magics.bigquery.bigquery.Client", autospec=True) bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) bqstorage_instance_mock = mock.create_autospec( @@ -961,14 +929,12 @@ def test_bigquery_magic_w_table_id_and_bqstorage_client(): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_dryrun_option_sets_job_config(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) sql = "SELECT 17 AS num" @@ -982,16 +948,14 @@ def test_bigquery_magic_dryrun_option_sets_job_config(): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_dryrun_option_returns_query_job(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) query_job_mock = mock.create_autospec( google.cloud.bigquery.job.QueryJob, instance=True ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) sql = "SELECT 17 AS num" @@ -1006,15 
+970,15 @@ def test_bigquery_magic_dryrun_option_returns_query_job(): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_dryrun_option_variable_error_message(ipython_ns_cleanup): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) ipython_ns_cleanup.append((ip, "q_job")) run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", + "bigquery_magics.bigquery._run_query", autospec=True, side_effect=exceptions.BadRequest("Syntax error in SQL query"), ) @@ -1033,16 +997,14 @@ def test_bigquery_magic_dryrun_option_variable_error_message(ipython_ns_cleanup) @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_dryrun_option_saves_query_job_to_variable(ipython_ns_cleanup): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) query_job_mock = mock.create_autospec( google.cloud.bigquery.job.QueryJob, instance=True ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) ipython_ns_cleanup.append((ip, "q_job")) @@ -1063,8 +1025,8 @@ def test_bigquery_magic_dryrun_option_saves_query_job_to_variable(ipython_ns_cle @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_saves_query_job_to_variable_on_error(ipython_ns_cleanup): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) @@ -1096,8 +1058,8 @@ def test_bigquery_magic_saves_query_job_to_variable_on_error(ipython_ns_cleanup) @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_w_maximum_bytes_billed_invalid(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None credentials_mock = mock.create_autospec( google.auth.credentials.Credentials, instance=True @@ -1117,14 +1079,13 @@ def test_bigquery_magic_w_maximum_bytes_billed_invalid(): "param_value,expected", [("987654321", "987654321"), ("None", "0")] ) @pytest.mark.usefixtures("ipython_interactive") -@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_bigquery_magic_w_maximum_bytes_billed_overrides_context(param_value, expected): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context._project = None + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context._project = None # Set the default maximum bytes billed, so we know it's overridable by the param. 
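     # ("None" on the magic line clears the default limit; the parametrized
     # cases above expect it to surface as "0" in the issued request.)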
- magics.context.default_query_job_config.maximum_bytes_billed = 1234567
+ bigquery_magics.context.default_query_job_config.maximum_bytes_billed = 1234567

project = "test-project"
job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE)
@@ -1141,7 +1102,9 @@ def test_bigquery_magic_w_maximum_bytes_billed_overrides_context(param_value, ex
default_patch = mock.patch(
"google.auth.default", return_value=(credentials_mock, "general-project")
)
- conn = magics.context._connection = make_connection(resource, query_results, data)
+ conn = bigquery_magics.context._connection = make_connection(
+ resource, query_results, data
+ )
list_rows_patch = mock.patch(
"google.cloud.bigquery.client.Client._list_rows_from_query_results",
return_value=google.cloud.bigquery.table._EmptyRowIterator(),
@@ -1157,13 +1120,12 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_w_maximum_bytes_billed_w_context_inplace():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._project = None
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._project = None

- magics.context.default_query_job_config.maximum_bytes_billed = 1337
+ bigquery_magics.context.default_query_job_config.maximum_bytes_billed = 1337

project = "test-project"
job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE)
@@ -1180,7 +1142,9 @@ def test_bigquery_magic_w_maximum_bytes_billed_w_context_inplace():
default_patch = mock.patch(
"google.auth.default", return_value=(credentials_mock, "general-project")
)
- conn = magics.context._connection = make_connection(resource, query_results, data)
+ conn = bigquery_magics.context._connection = make_connection(
+ resource, query_results, data
+ )
list_rows_patch = mock.patch(
"google.cloud.bigquery.client.Client._list_rows_from_query_results",
return_value=google.cloud.bigquery.table._EmptyRowIterator(),
@@ -1194,13 +1158,12 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_w_maximum_bytes_billed_w_context_setter():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._project = None
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._project = None

- magics.context.default_query_job_config = job.QueryJobConfig(
+ bigquery_magics.context.default_query_job_config = job.QueryJobConfig(
maximum_bytes_billed=10203
)

@@ -1219,7 +1182,9 @@ def test_bigquery_magic_w_maximum_bytes_billed_w_context_setter():
default_patch = mock.patch(
"google.auth.default", return_value=(credentials_mock, "general-project")
)
- conn = magics.context._connection = make_connection(resource, query_results, data)
+ conn = bigquery_magics.context._connection = make_connection(
+ resource, query_results, data
+ )
list_rows_patch = mock.patch(
"google.cloud.bigquery.client.Client._list_rows_from_query_results",
return_value=google.cloud.bigquery.table._EmptyRowIterator(),
@@ -1233,17 +1198,16 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_no_query_cache(monkeypatch):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
+ ip.extension_manager.load_extension("bigquery_magics")
conn = make_connection()
- monkeypatch.setattr(magics.context, "_connection", conn)
- monkeypatch.setattr(magics.context, "project", "project-from-context")
+ monkeypatch.setattr(bigquery_magics.context, "_connection", conn)
+ monkeypatch.setattr(bigquery_magics.context, "project", "project-from-context")

# --no_query_cache option should override context.
monkeypatch.setattr(
- magics.context.default_query_job_config, "use_query_cache", True
+ bigquery_magics.context.default_query_job_config, "use_query_cache", True
)

ip.run_cell_magic("bigquery", "--no_query_cache", QUERY_STRING)
@@ -1263,15 +1227,14 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_context_with_no_query_cache_from_context(monkeypatch):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
+ ip.extension_manager.load_extension("bigquery_magics")
conn = make_connection()
- monkeypatch.setattr(magics.context, "_connection", conn)
- monkeypatch.setattr(magics.context, "project", "project-from-context")
+ monkeypatch.setattr(bigquery_magics.context, "_connection", conn)
+ monkeypatch.setattr(bigquery_magics.context, "project", "project-from-context")
monkeypatch.setattr(
- magics.context.default_query_job_config, "use_query_cache", False
+ bigquery_magics.context.default_query_job_config, "use_query_cache", False
)

ip.run_cell_magic("bigquery", "", QUERY_STRING)
@@ -1291,13 +1254,12 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_w_progress_bar_type_w_context_setter(monkeypatch):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._project = None
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._project = None

- magics.context.progress_bar_type = "tqdm_gui"
+ bigquery_magics.context.progress_bar_type = "tqdm_gui"

mock_credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
@@ -1305,19 +1267,21 @@ def test_bigquery_magic_w_progress_bar_type_w_context_setter(monkeypatch):

# Set up the context with monkeypatch so that it's reset for subsequent
# tests.
- monkeypatch.setattr(magics.context, "_credentials", mock_credentials)
+ monkeypatch.setattr(bigquery_magics.context, "_credentials", mock_credentials)

# Mock out the BigQuery Storage API.
- bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient)
- bqstorage_client_patch = mock.patch(
- "google.cloud.bigquery_storage.BigQueryReadClient", bqstorage_mock
- )
+ if bigquery_storage is not None:
+ bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient)
+ bqstorage_client_patch = mock.patch(
+ "google.cloud.bigquery_storage.BigQueryReadClient", bqstorage_mock
+ )
+ else:
+ bqstorage_mock = mock.MagicMock()
+ bqstorage_client_patch = contextlib.nullcontext()

sql = "SELECT 17 AS num"
result = pandas.DataFrame([17], columns=["num"])
- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
query_job_mock = mock.create_autospec(
google.cloud.bigquery.job.QueryJob, instance=True
)
@@ -1331,7 +1295,7 @@ def test_bigquery_magic_w_progress_bar_type_w_context_setter(monkeypatch):
query_job_mock.to_dataframe.assert_called_once_with(
bqstorage_client=None,
create_bqstorage_client=False,
- progress_bar_type=magics.context.progress_bar_type,
+ progress_bar_type=bigquery_magics.context.progress_bar_type,
)

assert isinstance(return_value, pandas.DataFrame)
@@ -1340,12 +1304,10 @@
@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_with_progress_bar_type():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.progress_bar_type = None
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.progress_bar_type = None

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
with run_query_patch as run_query_mock:
ip.run_cell_magic(
"bigquery", "--progress_bar_type=tqdm_gui", "SELECT 17 as num"
@@ -1354,14 +1316,14 @@
progress_bar_used = run_query_mock.mock_calls[1][2]["progress_bar_type"]
assert progress_bar_used == "tqdm_gui"
# context progress bar type should not change
- assert magics.context.progress_bar_type is None
+ assert bigquery_magics.context.progress_bar_type is None


@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_with_project():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._project = None
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._project = None

credentials_mock = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
@@ -1369,27 +1331,23 @@
default_patch = mock.patch(
"google.auth.default", return_value=(credentials_mock, "general-project")
)
- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
with run_query_patch as run_query_mock, default_patch:
ip.run_cell_magic("bigquery", "--project=specific-project", "SELECT 17 as num")

client_used = run_query_mock.call_args_list[0][0][0]
assert client_used.project == "specific-project"
# context project should not change
- assert magics.context.project == "general-project"
+ assert bigquery_magics.context.project == "general-project"


@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_with_bigquery_api_endpoint(ipython_ns_cleanup):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._connection = None
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._connection = None

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
with run_query_patch as run_query_mock:
ip.run_cell_magic(
"bigquery",
@@ -1400,19 +1358,17 @@ def test_bigquery_magic_with_bigquery_api_endpoint(ipython_ns_cleanup):
connection_used = run_query_mock.call_args_list[0][0][0]._connection
assert connection_used.API_BASE_URL == "https://bigquery_api.endpoint.com"
# context client options should not change
- assert magics.context.bigquery_client_options.api_endpoint is None
+ assert bigquery_magics.context.bigquery_client_options.api_endpoint is None


@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_with_bigquery_api_endpoint_context_dict():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._connection = None
- magics.context.bigquery_client_options = {}
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._connection = None
+ bigquery_magics.context.bigquery_client_options = {}

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
with run_query_patch as run_query_mock:
ip.run_cell_magic(
"bigquery",
@@ -1423,18 +1379,19 @@
connection_used = run_query_mock.call_args_list[0][0][0]._connection
assert connection_used.API_BASE_URL == "https://bigquery_api.endpoint.com"
# context client options should not change
- assert magics.context.bigquery_client_options == {}
+ assert bigquery_magics.context.bigquery_client_options == {}


@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipif(
+ bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
def test_bigquery_magic_with_bqstorage_api_endpoint(ipython_ns_cleanup):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._connection = None
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._connection = None

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
with run_query_patch as run_query_mock:
ip.run_cell_magic(
"bigquery",
@@ -1445,19 +1402,20 @@
client_used = run_query_mock.mock_calls[1][2]["bqstorage_client"]
assert client_used._transport._host == "https://bqstorage_api.endpoint.com"
# context client options should not change
- assert magics.context.bqstorage_client_options.api_endpoint is None
+ assert bigquery_magics.context.bqstorage_client_options.api_endpoint is None


@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipif(
+ bigquery_storage is None, reason="Requires `google-cloud-bigquery-storage`"
+)
def test_bigquery_magic_with_bqstorage_api_endpoint_context_dict():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._connection = None
- magics.context.bqstorage_client_options = {}
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._connection = None
+ bigquery_magics.context.bqstorage_client_options = {}

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
with run_query_patch as run_query_mock:
ip.run_cell_magic(
"bigquery",
@@ -1468,14 +1426,14 @@
client_used = run_query_mock.mock_calls[1][2]["bqstorage_client"]
assert client_used._transport._host == "https://bqstorage_api.endpoint.com"
# context client options should not change
- assert magics.context.bqstorage_client_options == {}
+ assert bigquery_magics.context.bqstorage_client_options == {}


@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_with_multiple_options():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._project = None
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._project = None

credentials_mock = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
@@ -1483,9 +1441,7 @@
default_patch = mock.patch(
"google.auth.default", return_value=(credentials_mock, "general-project")
)
- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
with run_query_patch as run_query_mock, default_patch:
ip.run_cell_magic(
"bigquery",
@@ -1503,11 +1459,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_string_params(ipython_ns_cleanup):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1518,9 +1473,7 @@

assert "params_dict_df" not in ip.user_ns

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
query_job_mock = mock.create_autospec(
google.cloud.bigquery.job.QueryJob, instance=True
)
@@ -1540,11 +1493,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_dict_params(ipython_ns_cleanup):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1557,9 +1509,7 @@

assert "params_dict_df" not in ip.user_ns

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
query_job_mock = mock.create_autospec(
google.cloud.bigquery.job.QueryJob, instance=True
)
@@ -1584,11 +1534,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_dict_params_nonexisting():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1599,11 +1548,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_dict_params_incorrect_syntax():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1615,11 +1563,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_dict_params_duplicate():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1633,11 +1580,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_option_value_incorrect():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1649,11 +1595,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_dict_params_negative_value(ipython_ns_cleanup):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1664,9 +1609,7 @@

assert "params_dict_df" not in ip.user_ns

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
query_job_mock = mock.create_autospec(
google.cloud.bigquery.job.QueryJob, instance=True
)
@@ -1689,11 +1632,10 @@
@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_dict_params_array_value(ipython_ns_cleanup):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1704,9 +1646,7 @@

assert "params_dict_df" not in ip.user_ns

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
query_job_mock = mock.create_autospec(
google.cloud.bigquery.job.QueryJob, instance=True
)
@@ -1729,11 +1669,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_dict_params_tuple_value(ipython_ns_cleanup):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1744,9 +1683,7 @@

assert "params_dict_df" not in ip.user_ns

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
query_job_mock = mock.create_autospec(
google.cloud.bigquery.job.QueryJob, instance=True
)
@@ -1769,11 +1706,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_improperly_formatted_params():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1787,20 +1723,17 @@
"raw_sql", ("SELECT answer AS 42", " \t SELECT answer AS 42 \t ")
)
@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_valid_query_in_existing_variable(ipython_ns_cleanup, raw_sql):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

ipython_ns_cleanup.append((ip, "custom_query"))
ipython_ns_cleanup.append((ip, "query_results_df"))

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
query_job_mock = mock.create_autospec(
google.cloud.bigquery.job.QueryJob, instance=True
)
@@ -1826,17 +1759,14 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_nonexisting_query_variable():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)

ip.user_ns.pop("custom_query", None) # Make sure the variable does NOT exist.
cell_body = "$custom_query" # Referring to a non-existing variable name.
@@ -1850,17 +1780,14 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_empty_query_variable_name():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)

cell_body = "$" # Not referring to any variable (name omitted).

with pytest.raises(
@@ -1872,17 +1799,14 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_query_variable_non_string(ipython_ns_cleanup):
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)

ipython_ns_cleanup.append((ip, "custom_query"))

@@ -1898,11 +1822,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_query_variable_not_identifier():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1921,11 +1844,10 @@


@pytest.mark.usefixtures("ipython_interactive")
-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
def test_bigquery_magic_with_invalid_multiple_option_values():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

@@ -1941,7 +1863,7 @@
@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_omits_tracebacks_from_error_message():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
+ ip.extension_manager.load_extension("bigquery_magics")

credentials_mock = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
@@ -1951,7 +1873,7 @@
)

run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query",
+ "bigquery_magics.bigquery._run_query",
autospec=True,
side_effect=exceptions.BadRequest("Syntax error in SQL query"),
)
@@ -1968,8 +1890,8 @@
@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_w_destination_table_invalid_format():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context._project = None
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context._project = None

credentials_mock = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
@@ -1978,9 +1900,7 @@
"google.auth.default", return_value=(credentials_mock, "general-project")
)

- client_patch = mock.patch(
- "google.cloud.bigquery.magics.magics.bigquery.Client", autospec=True
- )
+ client_patch = mock.patch("bigquery_magics.bigquery.bigquery.Client", autospec=True)

with client_patch, default_patch, pytest.raises(ValueError) as exc_context:
ip.run_cell_magic(
"bigquery", "--destination_table dataset", "SELECT foo FROM WHERE LIMIT bar"
@@ -1996,19 +1916,17 @@
@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_w_destination_table():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

create_dataset_if_necessary_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._create_dataset_if_necessary",
+ "bigquery_magics.bigquery._create_dataset_if_necessary",
autospec=True,
)

- run_query_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._run_query", autospec=True
- )
+ run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)

with create_dataset_if_necessary_patch, run_query_patch as run_query_mock:
ip.run_cell_magic(
@@ -2028,18 +1946,18 @@
@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_create_dataset_fails():
ip = IPython.get_ipython()
- ip.extension_manager.load_extension("google.cloud.bigquery")
- magics.context.credentials = mock.create_autospec(
+ ip.extension_manager.load_extension("bigquery_magics")
+ bigquery_magics.context.credentials = mock.create_autospec(
google.auth.credentials.Credentials, instance=True
)

create_dataset_if_necessary_patch = mock.patch(
- "google.cloud.bigquery.magics.magics._create_dataset_if_necessary",
+ "bigquery_magics.bigquery._create_dataset_if_necessary",
autospec=True,
side_effect=OSError,
)
close_transports_patch = mock.patch( - "google.cloud.bigquery.magics.magics._close_transports", + "bigquery_magics.bigquery._close_transports", autospec=True, ) @@ -2058,14 +1976,12 @@ def test_bigquery_magic_create_dataset_fails(): @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_with_location(): ip = IPython.get_ipython() - ip.extension_manager.load_extension("google.cloud.bigquery") - magics.context.credentials = mock.create_autospec( + ip.extension_manager.load_extension("bigquery_magics") + bigquery_magics.context.credentials = mock.create_autospec( google.auth.credentials.Credentials, instance=True ) - run_query_patch = mock.patch( - "google.cloud.bigquery.magics.magics._run_query", autospec=True - ) + run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) with run_query_patch as run_query_mock: ip.run_cell_magic("bigquery", "--location=us-east1", "SELECT 17 AS num") From 8ef9b2c60de2546a6d8c7144fbca8e8549b0f875 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 11 Apr 2024 09:01:29 -0500 Subject: [PATCH 19/20] remove auth from unit tests --- tests/unit/test_bigquery.py | 162 ++++++++---------------------------- tests/unit/test_context.py | 59 +++++++++++++ 2 files changed, 93 insertions(+), 128 deletions(-) create mode 100644 tests/unit/test_context.py diff --git a/tests/unit/test_bigquery.py b/tests/unit/test_bigquery.py index 678aa56..f9c609f 100644 --- a/tests/unit/test_bigquery.py +++ b/tests/unit/test_bigquery.py @@ -109,6 +109,18 @@ def fail_if(name, globals, locals, fromlist, level): return test_utils.imports.maybe_fail_import(predicate=fail_if) +@pytest.fixture +def mock_credentials(monkeypatch): + credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + # Set up the context with monkeypatch so that it's reset for subsequent + # tests. 
+ monkeypatch.setattr(bigquery_magics.context, "_project", "test-project") + monkeypatch.setattr(bigquery_magics.context, "_credentials", credentials) + + PROJECT_ID = "its-a-project-eh" JOB_ID = "some-random-id" JOB_REFERENCE_RESOURCE = {"projectId": PROJECT_ID, "jobId": JOB_ID} @@ -140,27 +152,6 @@ def fail_if(name, globals, locals, fromlist, level): } -def test_context_with_default_credentials(): - """When Application Default Credentials are set, the context credentials - will be created the first time it is called - """ - assert bigquery_magics.context._credentials is None - assert bigquery_magics.context._project is None - - project = "prahj-ekt" - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, project) - ) - with default_patch as default_mock: - assert bigquery_magics.context.credentials is credentials_mock - assert bigquery_magics.context.project == project - - assert default_mock.call_count == 2 - - @pytest.mark.usefixtures("ipython_interactive") def test_context_with_default_connection(): ip = IPython.get_ipython() @@ -205,25 +196,6 @@ def test_context_with_default_connection(): default_conn.api_request.assert_has_calls([begin_call, query_results_call]) -def test_context_credentials_and_project_can_be_set_explicitly(): - project1 = "one-project-55564" - project2 = "other-project-52569" - credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - default_patch = mock.patch( - "google.auth.default", return_value=(credentials_mock, project1) - ) - with default_patch as default_mock: - bigquery_magics.context.credentials = credentials_mock - bigquery_magics.context.project = project2 - - assert bigquery_magics.context.project == project2 - assert bigquery_magics.context.credentials is credentials_mock - # default should not be called if credentials & project are explicitly set - assert default_mock.call_count == 0 - - @pytest.mark.usefixtures("ipython_interactive") def test_context_with_custom_connection(): ip = IPython.get_ipython() @@ -1253,22 +1225,13 @@ def test_context_with_no_query_cache_from_context(monkeypatch): assert not jobs_insert_call[1]["data"]["configuration"]["query"]["useQueryCache"] -@pytest.mark.usefixtures("ipython_interactive") -def test_bigquery_magic_w_progress_bar_type_w_context_setter(monkeypatch): +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") +def test_bigquery_magic_w_progress_bar_type_w_context_setter(): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") - bigquery_magics.context._project = None bigquery_magics.context.progress_bar_type = "tqdm_gui" - mock_credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) - - # Set up the context with monkeypatch so that it's reset for subsequent - # tests. - monkeypatch.setattr(bigquery_magics.context, "_credentials", mock_credentials) - # Mock out the BigQuery Storage API. 
if bigquery_storage is not None: bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient) @@ -1301,7 +1264,7 @@ def test_bigquery_magic_w_progress_bar_type_w_context_setter(monkeypatch): assert isinstance(return_value, pandas.DataFrame) -@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_with_progress_bar_type(): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") @@ -1458,7 +1421,7 @@ def test_bigquery_magic_with_multiple_options(): assert job_config_used.maximum_bytes_billed == 1024 -@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_with_string_params(ipython_ns_cleanup): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") @@ -1492,7 +1455,7 @@ def test_bigquery_magic_with_string_params(ipython_ns_cleanup): assert list(df) == list(result) # verify column names -@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_with_dict_params(ipython_ns_cleanup): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") @@ -1566,9 +1529,6 @@ def test_bigquery_magic_with_dict_params_incorrect_syntax(): def test_bigquery_magic_with_dict_params_duplicate(): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") - bigquery_magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) sql = "SELECT @foo AS foo" @@ -1579,13 +1539,10 @@ def test_bigquery_magic_with_dict_params_duplicate(): ip.run_cell_magic("bigquery", cell_magic_args, sql) -@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_with_option_value_incorrect(): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") - bigquery_magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) sql = "SELECT @foo AS foo" @@ -1594,13 +1551,10 @@ def test_bigquery_magic_with_option_value_incorrect(): ip.run_cell_magic("bigquery", cell_magic_args, sql) -@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_with_dict_params_negative_value(ipython_ns_cleanup): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") - bigquery_magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) ipython_ns_cleanup.append((ip, "params_dict_df")) @@ -1631,13 +1585,10 @@ def test_bigquery_magic_with_dict_params_negative_value(ipython_ns_cleanup): assert df["num"][0] == -17 -@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_with_dict_params_array_value(ipython_ns_cleanup): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") - bigquery_magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) ipython_ns_cleanup.append((ip, "params_dict_df")) @@ -1668,13 +1619,10 @@ def test_bigquery_magic_with_dict_params_array_value(ipython_ns_cleanup): assert list(df["array_data"]) == ["foo bar", "baz quux"] -@pytest.mark.usefixtures("ipython_interactive") 
+@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_with_dict_params_tuple_value(ipython_ns_cleanup): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") - bigquery_magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) ipython_ns_cleanup.append((ip, "params_dict_df")) @@ -1722,13 +1670,10 @@ def test_bigquery_magic_with_improperly_formatted_params(): @pytest.mark.parametrize( "raw_sql", ("SELECT answer AS 42", " \t SELECT answer AS 42 \t ") ) -@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_valid_query_in_existing_variable(ipython_ns_cleanup, raw_sql): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") - bigquery_magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) ipython_ns_cleanup.append((ip, "custom_query")) ipython_ns_cleanup.append((ip, "query_results_df")) @@ -1758,13 +1703,10 @@ def test_bigquery_magic_valid_query_in_existing_variable(ipython_ns_cleanup, raw assert list(df["answer"]) == [42] -@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_nonexisting_query_variable(): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") - bigquery_magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) @@ -1779,13 +1721,10 @@ def test_bigquery_magic_nonexisting_query_variable(): run_query_mock.assert_not_called() -@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.usefixtures("ipython_interactive", "mock_credentials") def test_bigquery_magic_empty_query_variable_name(): ip = IPython.get_ipython() ip.extension_manager.load_extension("bigquery_magics") - bigquery_magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True - ) run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True) cell_body = "$" # Not referring to any variable (name omitted). 
@@ -1798,13 +1737,10 @@ def test_bigquery_magic_empty_query_variable_name():
run_query_mock.assert_not_called()


-@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.usefixtures("ipython_interactive", "mock_credentials")
def test_bigquery_magic_query_variable_non_string(ipython_ns_cleanup):
ip = IPython.get_ipython()
ip.extension_manager.load_extension("bigquery_magics")
- bigquery_magics.context.credentials = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )

run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)

@@ -1821,13 +1757,10 @@


-@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.usefixtures("ipython_interactive", "mock_credentials")
def test_bigquery_magic_query_variable_not_identifier():
ip = IPython.get_ipython()
ip.extension_manager.load_extension("bigquery_magics")
- bigquery_magics.context.credentials = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )

cell_body = "$123foo" # 123foo is not valid Python identifier

@@ -1843,13 +1776,10 @@

assert "must be a fully-qualified ID" in output


-@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.usefixtures("ipython_interactive", "mock_credentials")
def test_bigquery_magic_with_invalid_multiple_option_values():
ip = IPython.get_ipython()
ip.extension_manager.load_extension("bigquery_magics")
- bigquery_magics.context.credentials = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )

sql = "SELECT @foo AS foo"

@@ -1860,25 +1790,18 @@

ip.run_cell_magic("bigquery", cell_magic_args, sql)


-@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.usefixtures("ipython_interactive", "mock_credentials")
def test_bigquery_magic_omits_tracebacks_from_error_message():
ip = IPython.get_ipython()
ip.extension_manager.load_extension("bigquery_magics")

- credentials_mock = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )
- default_patch = mock.patch(
- "google.auth.default", return_value=(credentials_mock, "general-project")
- )
-
run_query_patch = mock.patch(
"bigquery_magics.bigquery._run_query",
autospec=True,
side_effect=exceptions.BadRequest("Syntax error in SQL query"),
)

- with run_query_patch, default_patch, io.capture_output() as captured_io:
+ with run_query_patch, io.capture_output() as captured_io:
ip.run_cell_magic("bigquery", "", "SELECT foo FROM WHERE LIMIT bar")

output = captured_io.stderr
@@ -1887,22 +1810,14 @@
assert "Syntax error" not in captured_io.stdout


-@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.usefixtures("ipython_interactive", "mock_credentials")
def test_bigquery_magic_w_destination_table_invalid_format():
ip = IPython.get_ipython()
ip.extension_manager.load_extension("bigquery_magics")
- bigquery_magics.context._project = None
-
- credentials_mock = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )
- default_patch = mock.patch(
- "google.auth.default", return_value=(credentials_mock, "general-project")
- )

client_patch = mock.patch("bigquery_magics.bigquery.bigquery.Client", autospec=True)

- with client_patch, default_patch, pytest.raises(ValueError) as exc_context:
+ with client_patch, pytest.raises(ValueError) as exc_context:
ip.run_cell_magic(
"bigquery", "--destination_table dataset", "SELECT foo FROM WHERE LIMIT bar"
)
@@ -1913,13 +1828,10 @@


-@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.usefixtures("ipython_interactive", "mock_credentials")
def test_bigquery_magic_w_destination_table():
ip = IPython.get_ipython()
ip.extension_manager.load_extension("bigquery_magics")
- bigquery_magics.context.credentials = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )

create_dataset_if_necessary_patch = mock.patch(
"bigquery_magics.bigquery._create_dataset_if_necessary",
autospec=True,
)
@@ -1943,13 +1855,10 @@


-@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.usefixtures("ipython_interactive", "mock_credentials")
def test_bigquery_magic_create_dataset_fails():
ip = IPython.get_ipython()
ip.extension_manager.load_extension("bigquery_magics")
- bigquery_magics.context.credentials = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )

create_dataset_if_necessary_patch = mock.patch(
"bigquery_magics.bigquery._create_dataset_if_necessary",
autospec=True,
side_effect=OSError,
)
@@ -1973,13 +1882,10 @@


-@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.usefixtures("ipython_interactive", "mock_credentials")
def test_bigquery_magic_with_location():
ip = IPython.get_ipython()
ip.extension_manager.load_extension("bigquery_magics")
- bigquery_magics.context.credentials = mock.create_autospec(
- google.auth.credentials.Credentials, instance=True
- )

run_query_patch = mock.patch("bigquery_magics.bigquery._run_query", autospec=True)
with run_query_patch as run_query_mock:
diff --git a/tests/unit/test_context.py b/tests/unit/test_context.py
new file mode 100644
index 0000000..5236c0c
--- /dev/null
+++ b/tests/unit/test_context.py
@@ -0,0 +1,59 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest.mock as mock
+
+import google.auth.credentials
+
+import bigquery_magics
+
+
+def test_context_with_default_credentials():
+ """When Application Default Credentials are set, the context credentials
+ are created the first time they are accessed.
+ """
+ bigquery_magics.context._credentials = None
+ bigquery_magics.context._project = None
+
+ project = "prahj-ekt"
+ credentials_mock = mock.create_autospec(
+ google.auth.credentials.Credentials, instance=True
+ )
+ default_patch = mock.patch(
+ "google.auth.default", return_value=(credentials_mock, project)
+ )
+ with default_patch as default_mock:
+ assert bigquery_magics.context.credentials is credentials_mock
+ assert bigquery_magics.context.project == project
+
+ assert default_mock.call_count == 2
+
+
+def test_context_credentials_and_project_can_be_set_explicitly():
+ project1 = "one-project-55564"
+ project2 = "other-project-52569"
+ credentials_mock = mock.create_autospec(
+ google.auth.credentials.Credentials, instance=True
+ )
+ default_patch = mock.patch(
+ "google.auth.default", return_value=(credentials_mock, project1)
+ )
+ with default_patch as default_mock:
+ bigquery_magics.context.credentials = credentials_mock
+ bigquery_magics.context.project = project2
+
+ assert bigquery_magics.context.project == project2
+ assert bigquery_magics.context.credentials is credentials_mock
+ # default should not be called if credentials & project are explicitly set
+ assert default_mock.call_count == 0

From ae835f6289cb7727d7369d7acb5c1278b79f9c45 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Thu, 11 Apr 2024 11:11:06 -0500
Subject: [PATCH 20/20] fix coverage

---
tests/unit/line_arg_parser/test_lexer.py | 6 +--
tests/unit/line_arg_parser/test_parser.py | 48 ++++++++++-----------
tests/unit/line_arg_parser/test_visitors.py | 4 +-
3 files changed, 29 insertions(+), 29 deletions(-)

diff --git a/tests/unit/line_arg_parser/test_lexer.py b/tests/unit/line_arg_parser/test_lexer.py
index 3624ed0..08715b6 100644
--- a/tests/unit/line_arg_parser/test_lexer.py
+++ b/tests/unit/line_arg_parser/test_lexer.py
@@ -19,14 +19,14 @@

@pytest.fixture(scope="session")
def lexer_class():
- from google.cloud.bigquery.magics.line_arg_parser.lexer import Lexer
+ from bigquery_magics.line_arg_parser.lexer import Lexer

return Lexer


def test_empy_input(lexer_class):
- from google.cloud.bigquery.magics.line_arg_parser import TokenType
- from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+ from bigquery_magics.line_arg_parser import TokenType
+ from bigquery_magics.line_arg_parser.lexer import Token

lexer = lexer_class("")
tokens = list(lexer)
diff --git a/tests/unit/line_arg_parser/test_parser.py b/tests/unit/line_arg_parser/test_parser.py
index 2d6c62f..a6b2b43 100644
--- a/tests/unit/line_arg_parser/test_parser.py
+++ b/tests/unit/line_arg_parser/test_parser.py
@@ -19,14 +19,14 @@

@pytest.fixture(scope="session")
def parser_class():
- from google.cloud.bigquery.magics.line_arg_parser.parser import Parser
+ from bigquery_magics.line_arg_parser.parser import Parser

return Parser


def test_consume_expected_eol(parser_class):
- from google.cloud.bigquery.magics.line_arg_parser import TokenType
- from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+ from bigquery_magics.line_arg_parser import TokenType
+ from bigquery_magics.line_arg_parser.lexer import Token

# A simple iterable of Tokens is sufficient.
fake_lexer = [Token(TokenType.EOL, lexeme="", pos=0)] @@ -36,8 +36,8 @@ def test_consume_expected_eol(parser_class): def test_consume_unexpected_eol(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser import ParseError, TokenType + from bigquery_magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. fake_lexer = [Token(TokenType.EOL, lexeme="", pos=0)] @@ -48,8 +48,8 @@ def test_consume_unexpected_eol(parser_class): def test_input_line_unexpected_input(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser import ParseError, TokenType + from bigquery_magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. fake_lexer = [ @@ -64,8 +64,8 @@ def test_input_line_unexpected_input(parser_class): def test_destination_var_unexpected_input(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser import ParseError, TokenType + from bigquery_magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. fake_lexer = [ @@ -79,8 +79,8 @@ def test_destination_var_unexpected_input(parser_class): def test_option_value_unexpected_input(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser import ParseError, TokenType + from bigquery_magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. fake_lexer = [ @@ -94,8 +94,8 @@ def test_option_value_unexpected_input(parser_class): def test_dict_items_empty_dict(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser import TokenType + from bigquery_magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. fake_lexer = [Token(TokenType.RCURL, lexeme="}", pos=22)] @@ -107,8 +107,8 @@ def test_dict_items_empty_dict(parser_class): def test_dict_items_trailing_comma(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser import TokenType + from bigquery_magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. fake_lexer = [ @@ -129,8 +129,8 @@ def test_dict_items_trailing_comma(parser_class): def test_dict_item_unknown_input(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser import ParseError, TokenType + from bigquery_magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. 
fake_lexer = [Token(TokenType.UNKNOWN, lexeme="#/%", pos=35)] @@ -141,9 +141,9 @@ def test_dict_item_unknown_input(parser_class): def test_pyvalue_list_containing_dict(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token - from google.cloud.bigquery.magics.line_arg_parser.parser import PyDict, PyList + from bigquery_magics.line_arg_parser import TokenType + from bigquery_magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser.parser import PyDict, PyList # A simple iterable of Tokens is sufficient. fake_lexer = [ @@ -174,8 +174,8 @@ def test_pyvalue_list_containing_dict(parser_class): def test_pyvalue_invalid_token(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser import ParseError, TokenType + from bigquery_magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. fake_lexer = [Token(TokenType.OPTION_SPEC, lexeme="--verbose", pos=75)] @@ -187,8 +187,8 @@ def test_pyvalue_invalid_token(parser_class): def test_collection_items_empty(parser_class): - from google.cloud.bigquery.magics.line_arg_parser import TokenType - from google.cloud.bigquery.magics.line_arg_parser.lexer import Token + from bigquery_magics.line_arg_parser import TokenType + from bigquery_magics.line_arg_parser.lexer import Token # A simple iterable of Tokens is sufficient. fake_lexer = [Token(TokenType.RPAREN, lexeme=")", pos=30)] diff --git a/tests/unit/line_arg_parser/test_visitors.py b/tests/unit/line_arg_parser/test_visitors.py index 288ef5f..5f8e13d 100644 --- a/tests/unit/line_arg_parser/test_visitors.py +++ b/tests/unit/line_arg_parser/test_visitors.py @@ -19,13 +19,13 @@ @pytest.fixture def base_visitor(): - from google.cloud.bigquery.magics.line_arg_parser.visitors import NodeVisitor + from bigquery_magics.line_arg_parser.visitors import NodeVisitor return NodeVisitor() def test_unknown_node(base_visitor): - from google.cloud.bigquery.magics.line_arg_parser.parser import ParseNode + from bigquery_magics.line_arg_parser.parser import ParseNode class UnknownNode(ParseNode): pass
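---

Notes on the patterns in this series (illustrative sketches, not part of the patches themselves):

The net effect of the whole series on notebook users is a one-line change when loading the cell magic. A minimal IPython session, assuming `bigquery-magics` is installed; `my-project` is a placeholder for a real project ID, and the `--project` option is the one exercised by the tests above:

    # Before: the magic shipped inside google-cloud-bigquery.
    # %load_ext google.cloud.bigquery
    # After: the magic lives in its own distribution.
    %load_ext bigquery_magics

    %%bigquery df --project=my-project
    SELECT 17 AS num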
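The conditional BigQuery Storage mock added to test_bigquery.py leans on `contextlib.nullcontext()` standing in for `mock.patch(...)` when the optional dependency is missing; both behave as context managers, so the test body needs no branching. A self-contained sketch of the same pattern (it assumes `contextlib` is imported at the top of the test module, as the patched code requires):

    import contextlib
    from unittest import mock

    try:
        from google.cloud import bigquery_storage
    except ImportError:
        bigquery_storage = None

    if bigquery_storage is not None:
        # With the extra installed, patch the real client class.
        bqstorage_mock = mock.create_autospec(bigquery_storage.BigQueryReadClient)
        bqstorage_client_patch = mock.patch(
            "google.cloud.bigquery_storage.BigQueryReadClient", bqstorage_mock
        )
    else:
        # Without it, a plain MagicMock plus a no-op context manager keeps the
        # `with` block identical in both environments.
        bqstorage_mock = mock.MagicMock()
        bqstorage_client_patch = contextlib.nullcontext()

    with bqstorage_client_patch:
        pass  # the test body runs unchanged either way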
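The `mock_credentials` fixture from PATCH 19 is what lets all the per-test `google.auth` patching be deleted. A minimal sketch of the idea, with `test_reads_patched_context` as a hypothetical consumer; `monkeypatch` undoes the `setattr` calls after each test, so no test leaks a project or credentials into its neighbors:

    import unittest.mock as mock

    import google.auth.credentials
    import pytest

    import bigquery_magics


    @pytest.fixture
    def mock_credentials(monkeypatch):
        credentials = mock.create_autospec(
            google.auth.credentials.Credentials, instance=True
        )
        # Set up the context with monkeypatch so that it's reset for
        # subsequent tests.
        monkeypatch.setattr(bigquery_magics.context, "_project", "test-project")
        monkeypatch.setattr(bigquery_magics.context, "_credentials", credentials)


    @pytest.mark.usefixtures("mock_credentials")
    def test_reads_patched_context():
        # The context is pre-populated, so no google.auth.default() call happens.
        assert bigquery_magics.context.project == "test-project"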
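The line_arg_parser tests in PATCH 20 all rely on one trick worth naming: a plain list of `Token` objects stands in for a real `Lexer`, keeping the parser tests independent of tokenization details. A sketch, assuming (as the fixtures above imply) that `Parser` accepts any iterable of tokens as its constructor argument:

    from bigquery_magics.line_arg_parser import TokenType
    from bigquery_magics.line_arg_parser.lexer import Token
    from bigquery_magics.line_arg_parser.parser import Parser

    # EOL marks the end of an (empty) magic argument line; any iterable of
    # Token instances can drive the parser.
    fake_lexer = [Token(TokenType.EOL, lexeme="", pos=0)]
    parser = Parser(fake_lexer)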