From d6f2cd8f51c8411b682325b441030ce998eadf26 Mon Sep 17 00:00:00 2001 From: Owlbot Bootstrapper Date: Mon, 23 Sep 2024 16:08:53 +0000 Subject: [PATCH 1/9] feat: initial commit From d2fd1eb45d0c9e299a89d761692c0f8e44eecf69 Mon Sep 17 00:00:00 2001 From: Owlbot Bootstrapper Date: Mon, 23 Sep 2024 16:10:01 +0000 Subject: [PATCH 2/9] feat: initial generation of library Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW9yYWNsZWRhdGFiYXNlLy5Pd2xCb3QueWFtbCIsImgiOiIwOWQ2OGYzNTM2NWM3NGFkMjc2Y2VhM2U3YzI2NTUzYTE0ODVmYWEwIn0= --- .../google-cloud-oracledatabase/.OwlBot.yaml | 18 ++++++++++++++++++ .../.repo-metadata.json | 17 +++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 packages/google-cloud-oracledatabase/.OwlBot.yaml create mode 100644 packages/google-cloud-oracledatabase/.repo-metadata.json diff --git a/packages/google-cloud-oracledatabase/.OwlBot.yaml b/packages/google-cloud-oracledatabase/.OwlBot.yaml new file mode 100644 index 000000000000..ebf74202aadf --- /dev/null +++ b/packages/google-cloud-oracledatabase/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/cloud/oracledatabase/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-oracledatabase/$1 +api-name: google-cloud-oracledatabase diff --git a/packages/google-cloud-oracledatabase/.repo-metadata.json b/packages/google-cloud-oracledatabase/.repo-metadata.json new file mode 100644 index 000000000000..1dce499bb5ac --- /dev/null +++ b/packages/google-cloud-oracledatabase/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-oracledatabase", + "name_pretty": "", + "api_description": "", + "product_documentation": "", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-oracledatabase", + "api_id": "oracledatabase.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "oracledatabase" +} From 37d8bbbef789fb37c8b39634593c54431bb59512 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 23 Sep 2024 16:21:05 +0000 Subject: [PATCH 3/9] Owl Bot copied code from https://github.com/googleapis/googleapis-gen/commit/09d68f35365c74ad276cea3e7c26553a1485faa0 --- .../v1/.coveragerc | 13 + .../google-cloud-oracledatabase/v1/.flake8 | 33 + .../v1/MANIFEST.in | 2 + .../google-cloud-oracledatabase/v1/README.rst | 49 + .../v1/docs/_static/custom.css | 3 + .../v1/docs/conf.py | 376 + .../v1/docs/index.rst | 7 + .../oracledatabase_v1/oracle_database.rst | 10 + .../v1/docs/oracledatabase_v1/services_.rst | 6 + .../v1/docs/oracledatabase_v1/types_.rst | 6 + .../google/cloud/oracledatabase/__init__.py | 161 + .../cloud/oracledatabase/gapic_version.py | 16 + .../v1/google/cloud/oracledatabase/py.typed | 2 + .../cloud/oracledatabase_v1/__init__.py | 162 + .../oracledatabase_v1/gapic_metadata.json | 128 + 
.../cloud/oracledatabase_v1/gapic_version.py | 16 + .../google/cloud/oracledatabase_v1/py.typed | 2 + .../oracledatabase_v1/services/__init__.py | 15 + .../services/oracle_database/__init__.py | 20 + .../services/oracle_database/client.py | 3717 +++++++ .../services/oracle_database/pagers.py | 764 ++ .../oracle_database/transports/__init__.py | 32 + .../oracle_database/transports/base.py | 679 ++ .../oracle_database/transports/rest.py | 3204 +++++++ .../cloud/oracledatabase_v1/types/__init__.py | 182 + .../types/autonomous_database.py | 1418 +++ .../autonomous_database_character_set.py | 78 + .../types/autonomous_db_backup.py | 290 + .../types/autonomous_db_version.py | 72 + .../cloud/oracledatabase_v1/types/common.py | 48 + .../cloud/oracledatabase_v1/types/db_node.py | 158 + .../oracledatabase_v1/types/db_server.py | 163 + .../types/db_system_shape.py | 118 + .../oracledatabase_v1/types/entitlement.py | 127 + .../oracledatabase_v1/types/exadata_infra.py | 468 + .../oracledatabase_v1/types/gi_version.py | 55 + .../types/location_metadata.py | 46 + .../oracledatabase_v1/types/oracledatabase.py | 1227 +++ .../oracledatabase_v1/types/vm_cluster.py | 438 + .../google-cloud-oracledatabase/v1/mypy.ini | 3 + .../google-cloud-oracledatabase/v1/noxfile.py | 278 + ...atabase_create_autonomous_database_sync.py | 62 + ...reate_cloud_exadata_infrastructure_sync.py | 57 + ...e_database_create_cloud_vm_cluster_sync.py | 64 + ...atabase_delete_autonomous_database_sync.py | 56 + ...elete_cloud_exadata_infrastructure_sync.py | 56 + ...e_database_delete_cloud_vm_cluster_sync.py | 56 + ...enerate_autonomous_database_wallet_sync.py | 53 + ...e_database_get_autonomous_database_sync.py | 52 + ...e_get_cloud_exadata_infrastructure_sync.py | 52 + ...acle_database_get_cloud_vm_cluster_sync.py | 52 + ...e_list_autonomous_database_backups_sync.py | 53 + ...autonomous_database_character_sets_sync.py | 53 + ...database_list_autonomous_databases_sync.py | 53 + 
...tabase_list_autonomous_db_versions_sync.py | 53 + ...list_cloud_exadata_infrastructures_sync.py | 53 + ...le_database_list_cloud_vm_clusters_sync.py | 53 + ...ated_oracle_database_list_db_nodes_sync.py | 53 + ...ed_oracle_database_list_db_servers_sync.py | 53 + ...cle_database_list_db_system_shapes_sync.py | 53 + ..._oracle_database_list_entitlements_sync.py | 53 + ...d_oracle_database_list_gi_versions_sync.py | 53 + ...tabase_restore_autonomous_database_sync.py | 56 + ...tadata_google.cloud.oracledatabase.v1.json | 1815 ++++ .../fixup_oracledatabase_v1_keywords.py | 197 + .../google-cloud-oracledatabase/v1/setup.py | 93 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.7.txt | 10 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/oracledatabase_v1/__init__.py | 16 + .../oracledatabase_v1/test_oracle_database.py | 8504 +++++++++++++++++ 77 files changed, 26504 insertions(+) create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/oracle_database.rst create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/services_.rst create mode 100644 
owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/types_.rst create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/py.typed create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py create mode 100644 
owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py create mode 100644 
owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py create mode 100644 
owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json create mode 100644 
owl-bot-staging/google-cloud-oracledatabase/v1/scripts/fixup_oracledatabase_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/.coveragerc b/owl-bot-staging/google-cloud-oracledatabase/v1/.coveragerc new file mode 100644 index 000000000000..645dc04f340e --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/oracledatabase/__init__.py + google/cloud/oracledatabase/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 b/owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ 
b/owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in b/owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in new file mode 100644 index 000000000000..fb8faa261eae --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/oracledatabase *.py +recursive-include google/cloud/oracledatabase_v1 *.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/README.rst b/owl-bot-staging/google-cloud-oracledatabase/v1/README.rst new file mode 100644 index 000000000000..33823b82e94f --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Oracledatabase API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Oracledatabase API. +4. 
`Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py new file mode 100644 index 000000000000..5c45cac86fd3 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-oracledatabase documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. 
+templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-oracledatabase" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). 
+# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. 
+# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-oracledatabase-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-oracledatabase.tex", + u"google-cloud-oracledatabase Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-oracledatabase", + u"Google Cloud Oracledatabase Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-oracledatabase", + u"google-cloud-oracledatabase Documentation", + author, + "google-cloud-oracledatabase", + "GAPIC library for Google Cloud Oracledatabase API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst new file mode 100644 index 000000000000..8b7bea7d55e1 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + oracledatabase_v1/services + oracledatabase_v1/types diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/oracle_database.rst b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/oracle_database.rst new file mode 100644 index 000000000000..ef9ce591ce83 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/oracle_database.rst @@ -0,0 +1,10 @@ +OracleDatabase +-------------------------------- + +.. automodule:: google.cloud.oracledatabase_v1.services.oracle_database + :members: + :inherited-members: + +.. 
automodule:: google.cloud.oracledatabase_v1.services.oracle_database.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/services_.rst b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/services_.rst new file mode 100644 index 000000000000..8b9decce8ef1 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Oracledatabase v1 API +=============================================== +.. toctree:: + :maxdepth: 2 + + oracle_database diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/types_.rst b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/types_.rst new file mode 100644 index 000000000000..addba88a94ee --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Oracledatabase v1 API +============================================ + +.. automodule:: google.cloud.oracledatabase_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py new file mode 100644 index 000000000000..59a90fe071f9 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.oracledatabase import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.oracledatabase_v1.services.oracle_database.client import OracleDatabaseClient + +from google.cloud.oracledatabase_v1.types.autonomous_database import AllConnectionStrings +from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabase +from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseApex +from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseConnectionStrings +from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseConnectionUrls +from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseProperties +from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseStandbySummary +from google.cloud.oracledatabase_v1.types.autonomous_database import DatabaseConnectionStringProfile +from google.cloud.oracledatabase_v1.types.autonomous_database import ScheduledOperationDetails +from google.cloud.oracledatabase_v1.types.autonomous_database import DBWorkload +from google.cloud.oracledatabase_v1.types.autonomous_database import GenerateType +from google.cloud.oracledatabase_v1.types.autonomous_database import OperationsInsightsState +from google.cloud.oracledatabase_v1.types.autonomous_database import State +from google.cloud.oracledatabase_v1.types.autonomous_database_character_set import AutonomousDatabaseCharacterSet +from 
google.cloud.oracledatabase_v1.types.autonomous_db_backup import AutonomousDatabaseBackup +from google.cloud.oracledatabase_v1.types.autonomous_db_backup import AutonomousDatabaseBackupProperties +from google.cloud.oracledatabase_v1.types.autonomous_db_version import AutonomousDbVersion +from google.cloud.oracledatabase_v1.types.common import CustomerContact +from google.cloud.oracledatabase_v1.types.db_node import DbNode +from google.cloud.oracledatabase_v1.types.db_node import DbNodeProperties +from google.cloud.oracledatabase_v1.types.db_server import DbServer +from google.cloud.oracledatabase_v1.types.db_server import DbServerProperties +from google.cloud.oracledatabase_v1.types.db_system_shape import DbSystemShape +from google.cloud.oracledatabase_v1.types.entitlement import CloudAccountDetails +from google.cloud.oracledatabase_v1.types.entitlement import Entitlement +from google.cloud.oracledatabase_v1.types.exadata_infra import CloudExadataInfrastructure +from google.cloud.oracledatabase_v1.types.exadata_infra import CloudExadataInfrastructureProperties +from google.cloud.oracledatabase_v1.types.exadata_infra import MaintenanceWindow +from google.cloud.oracledatabase_v1.types.gi_version import GiVersion +from google.cloud.oracledatabase_v1.types.location_metadata import LocationMetadata +from google.cloud.oracledatabase_v1.types.oracledatabase import CreateAutonomousDatabaseRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import CreateCloudExadataInfrastructureRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import CreateCloudVmClusterRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import DeleteAutonomousDatabaseRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import DeleteCloudExadataInfrastructureRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import DeleteCloudVmClusterRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import 
GenerateAutonomousDatabaseWalletRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import GenerateAutonomousDatabaseWalletResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import GetAutonomousDatabaseRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import GetCloudExadataInfrastructureRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import GetCloudVmClusterRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseBackupsRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseBackupsResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseCharacterSetsRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseCharacterSetsResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabasesRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabasesResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDbVersionsRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDbVersionsResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudExadataInfrastructuresRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudExadataInfrastructuresResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudVmClustersRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudVmClustersResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbNodesRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbNodesResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbServersRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbServersResponse +from 
google.cloud.oracledatabase_v1.types.oracledatabase import ListDbSystemShapesRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbSystemShapesResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import ListEntitlementsRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListEntitlementsResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import ListGiVersionsRequest +from google.cloud.oracledatabase_v1.types.oracledatabase import ListGiVersionsResponse +from google.cloud.oracledatabase_v1.types.oracledatabase import OperationMetadata +from google.cloud.oracledatabase_v1.types.oracledatabase import RestoreAutonomousDatabaseRequest +from google.cloud.oracledatabase_v1.types.vm_cluster import CloudVmCluster +from google.cloud.oracledatabase_v1.types.vm_cluster import CloudVmClusterProperties +from google.cloud.oracledatabase_v1.types.vm_cluster import DataCollectionOptions + +__all__ = ('OracleDatabaseClient', + 'AllConnectionStrings', + 'AutonomousDatabase', + 'AutonomousDatabaseApex', + 'AutonomousDatabaseConnectionStrings', + 'AutonomousDatabaseConnectionUrls', + 'AutonomousDatabaseProperties', + 'AutonomousDatabaseStandbySummary', + 'DatabaseConnectionStringProfile', + 'ScheduledOperationDetails', + 'DBWorkload', + 'GenerateType', + 'OperationsInsightsState', + 'State', + 'AutonomousDatabaseCharacterSet', + 'AutonomousDatabaseBackup', + 'AutonomousDatabaseBackupProperties', + 'AutonomousDbVersion', + 'CustomerContact', + 'DbNode', + 'DbNodeProperties', + 'DbServer', + 'DbServerProperties', + 'DbSystemShape', + 'CloudAccountDetails', + 'Entitlement', + 'CloudExadataInfrastructure', + 'CloudExadataInfrastructureProperties', + 'MaintenanceWindow', + 'GiVersion', + 'LocationMetadata', + 'CreateAutonomousDatabaseRequest', + 'CreateCloudExadataInfrastructureRequest', + 'CreateCloudVmClusterRequest', + 'DeleteAutonomousDatabaseRequest', + 'DeleteCloudExadataInfrastructureRequest', + 
'DeleteCloudVmClusterRequest', + 'GenerateAutonomousDatabaseWalletRequest', + 'GenerateAutonomousDatabaseWalletResponse', + 'GetAutonomousDatabaseRequest', + 'GetCloudExadataInfrastructureRequest', + 'GetCloudVmClusterRequest', + 'ListAutonomousDatabaseBackupsRequest', + 'ListAutonomousDatabaseBackupsResponse', + 'ListAutonomousDatabaseCharacterSetsRequest', + 'ListAutonomousDatabaseCharacterSetsResponse', + 'ListAutonomousDatabasesRequest', + 'ListAutonomousDatabasesResponse', + 'ListAutonomousDbVersionsRequest', + 'ListAutonomousDbVersionsResponse', + 'ListCloudExadataInfrastructuresRequest', + 'ListCloudExadataInfrastructuresResponse', + 'ListCloudVmClustersRequest', + 'ListCloudVmClustersResponse', + 'ListDbNodesRequest', + 'ListDbNodesResponse', + 'ListDbServersRequest', + 'ListDbServersResponse', + 'ListDbSystemShapesRequest', + 'ListDbSystemShapesResponse', + 'ListEntitlementsRequest', + 'ListEntitlementsResponse', + 'ListGiVersionsRequest', + 'ListGiVersionsResponse', + 'OperationMetadata', + 'RestoreAutonomousDatabaseRequest', + 'CloudVmCluster', + 'CloudVmClusterProperties', + 'DataCollectionOptions', +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/gapic_version.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/py.typed b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/py.typed new file mode 100644 index 000000000000..cd7e437b6f62 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-oracledatabase package uses inline types. diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py new file mode 100644 index 000000000000..8b3946e07399 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.oracledatabase_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.oracle_database import OracleDatabaseClient + +from .types.autonomous_database import AllConnectionStrings +from .types.autonomous_database import AutonomousDatabase +from .types.autonomous_database import AutonomousDatabaseApex +from .types.autonomous_database import AutonomousDatabaseConnectionStrings +from .types.autonomous_database import AutonomousDatabaseConnectionUrls +from .types.autonomous_database import AutonomousDatabaseProperties +from .types.autonomous_database import AutonomousDatabaseStandbySummary +from .types.autonomous_database import DatabaseConnectionStringProfile +from .types.autonomous_database import ScheduledOperationDetails +from .types.autonomous_database import DBWorkload +from .types.autonomous_database import GenerateType +from .types.autonomous_database import OperationsInsightsState +from .types.autonomous_database import State +from .types.autonomous_database_character_set import AutonomousDatabaseCharacterSet +from .types.autonomous_db_backup import AutonomousDatabaseBackup +from .types.autonomous_db_backup import AutonomousDatabaseBackupProperties +from .types.autonomous_db_version import AutonomousDbVersion +from .types.common import CustomerContact +from .types.db_node import DbNode +from .types.db_node import DbNodeProperties +from .types.db_server import DbServer +from .types.db_server import DbServerProperties +from .types.db_system_shape import DbSystemShape +from .types.entitlement import CloudAccountDetails +from .types.entitlement import Entitlement +from .types.exadata_infra import CloudExadataInfrastructure +from .types.exadata_infra import CloudExadataInfrastructureProperties +from .types.exadata_infra import MaintenanceWindow +from .types.gi_version import GiVersion +from .types.location_metadata import LocationMetadata +from .types.oracledatabase import 
CreateAutonomousDatabaseRequest +from .types.oracledatabase import CreateCloudExadataInfrastructureRequest +from .types.oracledatabase import CreateCloudVmClusterRequest +from .types.oracledatabase import DeleteAutonomousDatabaseRequest +from .types.oracledatabase import DeleteCloudExadataInfrastructureRequest +from .types.oracledatabase import DeleteCloudVmClusterRequest +from .types.oracledatabase import GenerateAutonomousDatabaseWalletRequest +from .types.oracledatabase import GenerateAutonomousDatabaseWalletResponse +from .types.oracledatabase import GetAutonomousDatabaseRequest +from .types.oracledatabase import GetCloudExadataInfrastructureRequest +from .types.oracledatabase import GetCloudVmClusterRequest +from .types.oracledatabase import ListAutonomousDatabaseBackupsRequest +from .types.oracledatabase import ListAutonomousDatabaseBackupsResponse +from .types.oracledatabase import ListAutonomousDatabaseCharacterSetsRequest +from .types.oracledatabase import ListAutonomousDatabaseCharacterSetsResponse +from .types.oracledatabase import ListAutonomousDatabasesRequest +from .types.oracledatabase import ListAutonomousDatabasesResponse +from .types.oracledatabase import ListAutonomousDbVersionsRequest +from .types.oracledatabase import ListAutonomousDbVersionsResponse +from .types.oracledatabase import ListCloudExadataInfrastructuresRequest +from .types.oracledatabase import ListCloudExadataInfrastructuresResponse +from .types.oracledatabase import ListCloudVmClustersRequest +from .types.oracledatabase import ListCloudVmClustersResponse +from .types.oracledatabase import ListDbNodesRequest +from .types.oracledatabase import ListDbNodesResponse +from .types.oracledatabase import ListDbServersRequest +from .types.oracledatabase import ListDbServersResponse +from .types.oracledatabase import ListDbSystemShapesRequest +from .types.oracledatabase import ListDbSystemShapesResponse +from .types.oracledatabase import ListEntitlementsRequest +from .types.oracledatabase 
import ListEntitlementsResponse +from .types.oracledatabase import ListGiVersionsRequest +from .types.oracledatabase import ListGiVersionsResponse +from .types.oracledatabase import OperationMetadata +from .types.oracledatabase import RestoreAutonomousDatabaseRequest +from .types.vm_cluster import CloudVmCluster +from .types.vm_cluster import CloudVmClusterProperties +from .types.vm_cluster import DataCollectionOptions + +__all__ = ( +'AllConnectionStrings', +'AutonomousDatabase', +'AutonomousDatabaseApex', +'AutonomousDatabaseBackup', +'AutonomousDatabaseBackupProperties', +'AutonomousDatabaseCharacterSet', +'AutonomousDatabaseConnectionStrings', +'AutonomousDatabaseConnectionUrls', +'AutonomousDatabaseProperties', +'AutonomousDatabaseStandbySummary', +'AutonomousDbVersion', +'CloudAccountDetails', +'CloudExadataInfrastructure', +'CloudExadataInfrastructureProperties', +'CloudVmCluster', +'CloudVmClusterProperties', +'CreateAutonomousDatabaseRequest', +'CreateCloudExadataInfrastructureRequest', +'CreateCloudVmClusterRequest', +'CustomerContact', +'DBWorkload', +'DataCollectionOptions', +'DatabaseConnectionStringProfile', +'DbNode', +'DbNodeProperties', +'DbServer', +'DbServerProperties', +'DbSystemShape', +'DeleteAutonomousDatabaseRequest', +'DeleteCloudExadataInfrastructureRequest', +'DeleteCloudVmClusterRequest', +'Entitlement', +'GenerateAutonomousDatabaseWalletRequest', +'GenerateAutonomousDatabaseWalletResponse', +'GenerateType', +'GetAutonomousDatabaseRequest', +'GetCloudExadataInfrastructureRequest', +'GetCloudVmClusterRequest', +'GiVersion', +'ListAutonomousDatabaseBackupsRequest', +'ListAutonomousDatabaseBackupsResponse', +'ListAutonomousDatabaseCharacterSetsRequest', +'ListAutonomousDatabaseCharacterSetsResponse', +'ListAutonomousDatabasesRequest', +'ListAutonomousDatabasesResponse', +'ListAutonomousDbVersionsRequest', +'ListAutonomousDbVersionsResponse', +'ListCloudExadataInfrastructuresRequest', +'ListCloudExadataInfrastructuresResponse', 
+'ListCloudVmClustersRequest', +'ListCloudVmClustersResponse', +'ListDbNodesRequest', +'ListDbNodesResponse', +'ListDbServersRequest', +'ListDbServersResponse', +'ListDbSystemShapesRequest', +'ListDbSystemShapesResponse', +'ListEntitlementsRequest', +'ListEntitlementsResponse', +'ListGiVersionsRequest', +'ListGiVersionsResponse', +'LocationMetadata', +'MaintenanceWindow', +'OperationMetadata', +'OperationsInsightsState', +'OracleDatabaseClient', +'RestoreAutonomousDatabaseRequest', +'ScheduledOperationDetails', +'State', +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_metadata.json new file mode 100644 index 000000000000..847abe3bdc22 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_metadata.json @@ -0,0 +1,128 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.oracledatabase_v1", + "protoPackage": "google.cloud.oracledatabase.v1", + "schema": "1.0", + "services": { + "OracleDatabase": { + "clients": { + "rest": { + "libraryClient": "OracleDatabaseClient", + "rpcs": { + "CreateAutonomousDatabase": { + "methods": [ + "create_autonomous_database" + ] + }, + "CreateCloudExadataInfrastructure": { + "methods": [ + "create_cloud_exadata_infrastructure" + ] + }, + "CreateCloudVmCluster": { + "methods": [ + "create_cloud_vm_cluster" + ] + }, + "DeleteAutonomousDatabase": { + "methods": [ + "delete_autonomous_database" + ] + }, + "DeleteCloudExadataInfrastructure": { + "methods": [ + "delete_cloud_exadata_infrastructure" + ] + }, + "DeleteCloudVmCluster": { + "methods": [ + "delete_cloud_vm_cluster" + ] + }, + "GenerateAutonomousDatabaseWallet": { + "methods": [ + "generate_autonomous_database_wallet" + ] + }, + "GetAutonomousDatabase": { + "methods": [ + 
"get_autonomous_database" + ] + }, + "GetCloudExadataInfrastructure": { + "methods": [ + "get_cloud_exadata_infrastructure" + ] + }, + "GetCloudVmCluster": { + "methods": [ + "get_cloud_vm_cluster" + ] + }, + "ListAutonomousDatabaseBackups": { + "methods": [ + "list_autonomous_database_backups" + ] + }, + "ListAutonomousDatabaseCharacterSets": { + "methods": [ + "list_autonomous_database_character_sets" + ] + }, + "ListAutonomousDatabases": { + "methods": [ + "list_autonomous_databases" + ] + }, + "ListAutonomousDbVersions": { + "methods": [ + "list_autonomous_db_versions" + ] + }, + "ListCloudExadataInfrastructures": { + "methods": [ + "list_cloud_exadata_infrastructures" + ] + }, + "ListCloudVmClusters": { + "methods": [ + "list_cloud_vm_clusters" + ] + }, + "ListDbNodes": { + "methods": [ + "list_db_nodes" + ] + }, + "ListDbServers": { + "methods": [ + "list_db_servers" + ] + }, + "ListDbSystemShapes": { + "methods": [ + "list_db_system_shapes" + ] + }, + "ListEntitlements": { + "methods": [ + "list_entitlements" + ] + }, + "ListGiVersions": { + "methods": [ + "list_gi_versions" + ] + }, + "RestoreAutonomousDatabase": { + "methods": [ + "restore_autonomous_database" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_version.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/py.typed b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/py.typed new file mode 100644 index 000000000000..cd7e437b6f62 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-oracledatabase package uses inline types. diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py new file mode 100644 index 000000000000..4b2d17bc5ef2 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import OracleDatabaseClient + +__all__ = ( + 'OracleDatabaseClient', +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py new file mode 100644 index 000000000000..f1332e871961 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py @@ -0,0 +1,3717 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
import warnings

from google.cloud.oracledatabase_v1 import gapic_version as package_version

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore

from google.api_core import operation  # type: ignore
from google.api_core import operation_async  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.cloud.oracledatabase_v1.services.oracle_database import pagers
from google.cloud.oracledatabase_v1.types import autonomous_database
from google.cloud.oracledatabase_v1.types import autonomous_database as gco_autonomous_database
from google.cloud.oracledatabase_v1.types import autonomous_database_character_set
from google.cloud.oracledatabase_v1.types import autonomous_db_backup
from google.cloud.oracledatabase_v1.types import autonomous_db_version
from google.cloud.oracledatabase_v1.types import db_node
from google.cloud.oracledatabase_v1.types import db_server
from google.cloud.oracledatabase_v1.types import db_system_shape
from google.cloud.oracledatabase_v1.types import entitlement
from google.cloud.oracledatabase_v1.types import exadata_infra
from google.cloud.oracledatabase_v1.types import gi_version
from google.cloud.oracledatabase_v1.types import oracledatabase
from google.cloud.oracledatabase_v1.types import vm_cluster
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore
from .transports.base import OracleDatabaseTransport, DEFAULT_CLIENT_INFO
from .transports.rest import OracleDatabaseRestTransport


class OracleDatabaseClientMeta(type):
    """Metaclass for the OracleDatabase client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[OracleDatabaseTransport]]
    _transport_registry["rest"] = OracleDatabaseRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
        ) -> Type[OracleDatabaseTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class OracleDatabaseClient(metaclass=OracleDatabaseClientMeta):
    """Service describing handlers for resources"""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # NOTE: the named groups here are required: the unpacking below reads
        # exactly four groups (name, mtls, sandbox, googledomain).
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
    DEFAULT_ENDPOINT = "oracledatabase.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    _DEFAULT_ENDPOINT_TEMPLATE = "oracledatabase.{UNIVERSE_DOMAIN}"
    _DEFAULT_UNIVERSE = "googleapis.com"

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            OracleDatabaseClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OracleDatabaseClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> OracleDatabaseTransport: + """Returns the transport used by the client instance. + + Returns: + OracleDatabaseTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def autonomous_database_path(project: str,location: str,autonomous_database: str,) -> str: + """Returns a fully-qualified autonomous_database string.""" + return "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format(project=project, location=location, autonomous_database=autonomous_database, ) + + @staticmethod + def parse_autonomous_database_path(path: str) -> Dict[str,str]: + """Parses a autonomous_database path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabases/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def autonomous_database_backup_path(project: str,location: str,autonomous_database_backup: str,) -> str: + """Returns a fully-qualified autonomous_database_backup string.""" + return "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format(project=project, 
location=location, autonomous_database_backup=autonomous_database_backup, ) + + @staticmethod + def parse_autonomous_database_backup_path(path: str) -> Dict[str,str]: + """Parses a autonomous_database_backup path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseBackups/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def autonomous_database_character_set_path(project: str,location: str,autonomous_database_character_set: str,) -> str: + """Returns a fully-qualified autonomous_database_character_set string.""" + return "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format(project=project, location=location, autonomous_database_character_set=autonomous_database_character_set, ) + + @staticmethod + def parse_autonomous_database_character_set_path(path: str) -> Dict[str,str]: + """Parses a autonomous_database_character_set path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseCharacterSets/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def autonomous_db_version_path(project: str,location: str,autonomous_db_version: str,) -> str: + """Returns a fully-qualified autonomous_db_version string.""" + return "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format(project=project, location=location, autonomous_db_version=autonomous_db_version, ) + + @staticmethod + def parse_autonomous_db_version_path(path: str) -> Dict[str,str]: + """Parses a autonomous_db_version path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDbVersions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def cloud_exadata_infrastructure_path(project: str,location: str,cloud_exadata_infrastructure: str,) -> str: + """Returns a fully-qualified cloud_exadata_infrastructure string.""" + 
return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, ) + + @staticmethod + def parse_cloud_exadata_infrastructure_path(path: str) -> Dict[str,str]: + """Parses a cloud_exadata_infrastructure path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def cloud_vm_cluster_path(project: str,location: str,cloud_vm_cluster: str,) -> str: + """Returns a fully-qualified cloud_vm_cluster string.""" + return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, ) + + @staticmethod + def parse_cloud_vm_cluster_path(path: str) -> Dict[str,str]: + """Parses a cloud_vm_cluster path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def db_node_path(project: str,location: str,cloud_vm_cluster: str,db_node: str,) -> str: + """Returns a fully-qualified db_node string.""" + return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, db_node=db_node, ) + + @staticmethod + def parse_db_node_path(path: str) -> Dict[str,str]: + """Parses a db_node path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)/dbNodes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def db_server_path(project: str,location: str,cloud_exadata_infrastructure: str,db_server: str,) -> str: + """Returns a fully-qualified db_server string.""" + return 
"projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, db_server=db_server, ) + + @staticmethod + def parse_db_server_path(path: str) -> Dict[str,str]: + """Parses a db_server path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)/dbServers/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def db_system_shape_path(project: str,location: str,db_system_shape: str,) -> str: + """Returns a fully-qualified db_system_shape string.""" + return "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format(project=project, location=location, db_system_shape=db_system_shape, ) + + @staticmethod + def parse_db_system_shape_path(path: str) -> Dict[str,str]: + """Parses a db_system_shape path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dbSystemShapes/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entitlement_path(project: str,location: str,entitlement: str,) -> str: + """Returns a fully-qualified entitlement string.""" + return "projects/{project}/locations/{location}/entitlements/{entitlement}".format(project=project, location=location, entitlement=entitlement, ) + + @staticmethod + def parse_entitlement_path(path: str) -> Dict[str,str]: + """Parses a entitlement path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entitlements/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def gi_version_path(project: str,location: str,gi_version: str,) -> str: + """Returns a fully-qualified gi_version string.""" + return "projects/{project}/locations/{location}/giVersions/{gi_version}".format(project=project, location=location, gi_version=gi_version, ) + + @staticmethod + def 
parse_gi_version_path(path: str) -> Dict[str,str]: + """Parses a gi_version path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/giVersions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def network_path(project: str,network: str,) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str,str]: + """Parses a network path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + 
def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. 
If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = OracleDatabaseClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + OracleDatabaseClient._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. 
+ + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, OracleDatabaseTransport, Callable[..., OracleDatabaseTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the oracle database client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,OracleDatabaseTransport,Callable[..., OracleDatabaseTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the OracleDatabaseTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = OracleDatabaseClient._read_environment_variables() + self._client_cert_source = OracleDatabaseClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = OracleDatabaseClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, OracleDatabaseTransport) + if transport_provided: + # transport is a OracleDatabaseTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(OracleDatabaseTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + OracleDatabaseClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[OracleDatabaseTransport], Callable[..., OracleDatabaseTransport]] = ( + OracleDatabaseClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., OracleDatabaseTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_cloud_exadata_infrastructures(self, + request: Optional[Union[oracledatabase.ListCloudExadataInfrastructuresRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudExadataInfrastructuresPager: + r"""Lists Exadata Infrastructures in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_cloud_exadata_infrastructures(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudExadataInfrastructuresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_exadata_infrastructures(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresRequest, dict]): + The request object. The request for ``CloudExadataInfrastructures.List``. + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the + following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudExadataInfrastructuresPager: + The response for CloudExadataInfrastructures.list. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListCloudExadataInfrastructuresRequest): + request = oracledatabase.ListCloudExadataInfrastructuresRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_cloud_exadata_infrastructures] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCloudExadataInfrastructuresPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_cloud_exadata_infrastructure(self, + request: Optional[Union[oracledatabase.GetCloudExadataInfrastructureRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> exadata_infra.CloudExadataInfrastructure: + r"""Gets details of a single Exadata Infrastructure. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_get_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_exadata_infrastructure(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GetCloudExadataInfrastructureRequest, dict]): + The request object. The request for ``CloudExadataInfrastructure.Get``. + name (str): + Required. The name of the Cloud Exadata Infrastructure + in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure: + Represents CloudExadataInfrastructure + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GetCloudExadataInfrastructureRequest): + request = oracledatabase.GetCloudExadataInfrastructureRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cloud_exadata_infrastructure] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cloud_exadata_infrastructure(self, + request: Optional[Union[oracledatabase.CreateCloudExadataInfrastructureRequest, dict]] = None, + *, + parent: Optional[str] = None, + cloud_exadata_infrastructure: Optional[exadata_infra.CloudExadataInfrastructure] = None, + cloud_exadata_infrastructure_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Exadata Infrastructure in a given + project and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_create_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.CreateCloudExadataInfrastructureRequest( + parent="parent_value", + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", + ) + + # Make the request + operation = client.create_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.CreateCloudExadataInfrastructureRequest, dict]): + The request object. The request for ``CloudExadataInfrastructure.Create``. + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the + following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cloud_exadata_infrastructure (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure): + Required. Details of the Exadata + Infrastructure instance to create. + + This corresponds to the ``cloud_exadata_infrastructure`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cloud_exadata_infrastructure_id (str): + Required. The ID of the Exadata Infrastructure to + create. 
This value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a + maximum of 63 characters in length. The value must start + with a letter and end with a letter or a number. + + This corresponds to the ``cloud_exadata_infrastructure_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure` Represents CloudExadataInfrastructure resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, cloud_exadata_infrastructure, cloud_exadata_infrastructure_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.CreateCloudExadataInfrastructureRequest): + request = oracledatabase.CreateCloudExadataInfrastructureRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if cloud_exadata_infrastructure is not None: + request.cloud_exadata_infrastructure = cloud_exadata_infrastructure + if cloud_exadata_infrastructure_id is not None: + request.cloud_exadata_infrastructure_id = cloud_exadata_infrastructure_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cloud_exadata_infrastructure] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + exadata_infra.CloudExadataInfrastructure, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cloud_exadata_infrastructure(self, + request: Optional[Union[oracledatabase.DeleteCloudExadataInfrastructureRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Exadata Infrastructure. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_delete_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.DeleteCloudExadataInfrastructureRequest, dict]): + The request object. The request for ``CloudExadataInfrastructure.Delete``. + name (str): + Required. The name of the Cloud Exadata Infrastructure + in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.DeleteCloudExadataInfrastructureRequest): + request = oracledatabase.DeleteCloudExadataInfrastructureRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cloud_exadata_infrastructure] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_cloud_vm_clusters(self, + request: Optional[Union[oracledatabase.ListCloudVmClustersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudVmClustersPager: + r"""Lists the VM Clusters in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_cloud_vm_clusters(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudVmClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_vm_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListCloudVmClustersRequest, dict]): + The request object. The request for ``CloudVmCluster.List``. + parent (str): + Required. The name of the parent in + the following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudVmClustersPager: + The response for CloudVmCluster.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListCloudVmClustersRequest): + request = oracledatabase.ListCloudVmClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_cloud_vm_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCloudVmClustersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_cloud_vm_cluster(self, + request: Optional[Union[oracledatabase.GetCloudVmClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vm_cluster.CloudVmCluster: + r"""Gets details of a single VM Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_get_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_vm_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GetCloudVmClusterRequest, dict]): + The request object. The request for ``CloudVmCluster.Get``. + name (str): + Required. The name of the Cloud VM Cluster in the + following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.oracledatabase_v1.types.CloudVmCluster: + Details of the Cloud VM Cluster + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GetCloudVmClusterRequest): + request = oracledatabase.GetCloudVmClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cloud_vm_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_cloud_vm_cluster(self, + request: Optional[Union[oracledatabase.CreateCloudVmClusterRequest, dict]] = None, + *, + parent: Optional[str] = None, + cloud_vm_cluster: Optional[vm_cluster.CloudVmCluster] = None, + cloud_vm_cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new VM Cluster in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_create_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + cloud_vm_cluster = oracledatabase_v1.CloudVmCluster() + cloud_vm_cluster.exadata_infrastructure = "exadata_infrastructure_value" + cloud_vm_cluster.cidr = "cidr_value" + cloud_vm_cluster.backup_subnet_cidr = "backup_subnet_cidr_value" + cloud_vm_cluster.network = "network_value" + + request = oracledatabase_v1.CreateCloudVmClusterRequest( + parent="parent_value", + cloud_vm_cluster_id="cloud_vm_cluster_id_value", + cloud_vm_cluster=cloud_vm_cluster, + ) + + # Make the request + operation = client.create_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.CreateCloudVmClusterRequest, dict]): + The request object. The request for ``CloudVmCluster.Create``. 
+ parent (str): + Required. The name of the parent in + the following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cloud_vm_cluster (google.cloud.oracledatabase_v1.types.CloudVmCluster): + Required. The resource being created + This corresponds to the ``cloud_vm_cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cloud_vm_cluster_id (str): + Required. The ID of the VM Cluster to create. This value + is restricted to (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) + and must be a maximum of 63 characters in length. The + value must start with a letter and end with a letter or + a number. + + This corresponds to the ``cloud_vm_cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.CloudVmCluster` Details of the Cloud VM Cluster resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, cloud_vm_cluster, cloud_vm_cluster_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.CreateCloudVmClusterRequest): + request = oracledatabase.CreateCloudVmClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if cloud_vm_cluster is not None: + request.cloud_vm_cluster = cloud_vm_cluster + if cloud_vm_cluster_id is not None: + request.cloud_vm_cluster_id = cloud_vm_cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cloud_vm_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + vm_cluster.CloudVmCluster, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_cloud_vm_cluster(self, + request: Optional[Union[oracledatabase.DeleteCloudVmClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single VM Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_delete_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.DeleteCloudVmClusterRequest, dict]): + The request object. The request for ``CloudVmCluster.Delete``. + name (str): + Required. The name of the Cloud VM Cluster in the + following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.DeleteCloudVmClusterRequest): + request = oracledatabase.DeleteCloudVmClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cloud_vm_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_entitlements(self, + request: Optional[Union[oracledatabase.ListEntitlementsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntitlementsPager: + r"""Lists the entitlements in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_entitlements(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListEntitlementsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entitlements(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListEntitlementsRequest, dict]): + The request object. The request for ``Entitlement.List``. + parent (str): + Required. The parent value for the + entitlement in the following format: + projects/{project}/locations/{location}. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListEntitlementsPager: + The response for Entitlement.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListEntitlementsRequest): + request = oracledatabase.ListEntitlementsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entitlements] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntitlementsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_db_servers(self, + request: Optional[Union[oracledatabase.ListDbServersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbServersPager: + r"""Lists the database servers of an Exadata + Infrastructure instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_db_servers(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbServersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_servers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListDbServersRequest, dict]): + The request object. The request for ``DbServer.List``. + parent (str): + Required. 
The parent value for + database server in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloudExadataInfrastructure}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbServersPager: + The response for DbServer.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListDbServersRequest): + request = oracledatabase.ListDbServersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_db_servers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDbServersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_db_nodes(self, + request: Optional[Union[oracledatabase.ListDbNodesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbNodesPager: + r"""Lists the database nodes of a VM Cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_db_nodes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbNodesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_nodes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListDbNodesRequest, dict]): + The request object. The request for ``DbNode.List``. + parent (str): + Required. The parent value for + database node in the following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloudVmCluster}. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbNodesPager: + The response for DbNode.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListDbNodesRequest): + request = oracledatabase.ListDbNodesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_db_nodes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDbNodesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_gi_versions(self, + request: Optional[Union[oracledatabase.ListGiVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGiVersionsPager: + r"""Lists all the valid Oracle Grid Infrastructure (GI) + versions for the given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_gi_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListGiVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gi_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListGiVersionsRequest, dict]): + The request object. The request for ``GiVersion.List``. + parent (str): + Required. 
The parent value for Grid + Infrastructure Version in the following + format: Format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListGiVersionsPager: + The response for GiVersion.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListGiVersionsRequest): + request = oracledatabase.ListGiVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_gi_versions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGiVersionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_db_system_shapes(self, + request: Optional[Union[oracledatabase.ListDbSystemShapesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbSystemShapesPager: + r"""Lists the database system shapes available for the + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_db_system_shapes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbSystemShapesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_system_shapes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListDbSystemShapesRequest, dict]): + The request object. The request for ``DbSystemShape.List``. + parent (str): + Required. 
The parent value for + Database System Shapes in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbSystemShapesPager: + The response for DbSystemShape.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListDbSystemShapesRequest): + request = oracledatabase.ListDbSystemShapesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_db_system_shapes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDbSystemShapesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_databases(self, + request: Optional[Union[oracledatabase.ListAutonomousDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabasesPager: + r"""Lists the Autonomous Databases in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_databases(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesRequest, dict]): + The request object. The request for ``AutonomousDatabase.List``. + parent (str): + Required. 
The parent value for the + Autonomous Database in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabasesPager: + The response for AutonomousDatabase.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDatabasesRequest): + request = oracledatabase.ListAutonomousDatabasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_autonomous_databases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDatabasesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_autonomous_database(self, + request: Optional[Union[oracledatabase.GetAutonomousDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autonomous_database.AutonomousDatabase: + r"""Gets the details of a single Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_get_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_autonomous_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GetAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Get``. + name (str): + Required. 
The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.AutonomousDatabase: + Details of the Autonomous Database + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GetAutonomousDatabaseRequest): + request = oracledatabase.GetAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_autonomous_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_autonomous_database(self, + request: Optional[Union[oracledatabase.CreateAutonomousDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + autonomous_database: Optional[gco_autonomous_database.AutonomousDatabase] = None, + autonomous_database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Autonomous Database in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_create_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + autonomous_database = oracledatabase_v1.AutonomousDatabase() + autonomous_database.network = "network_value" + autonomous_database.cidr = "cidr_value" + + request = oracledatabase_v1.CreateAutonomousDatabaseRequest( + parent="parent_value", + autonomous_database_id="autonomous_database_id_value", + autonomous_database=autonomous_database, + ) + + # Make the request + operation = client.create_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request 
(Union[google.cloud.oracledatabase_v1.types.CreateAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Create``. + parent (str): + Required. The name of the parent in + the following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autonomous_database (google.cloud.oracledatabase_v1.types.AutonomousDatabase): + Required. The Autonomous Database + being created. + + This corresponds to the ``autonomous_database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autonomous_database_id (str): + Required. The ID of the Autonomous Database to create. + This value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a + maximum of 63 characters in length. The value must start + with a letter and end with a letter or a number. + + This corresponds to the ``autonomous_database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.AutonomousDatabase` Details of the Autonomous Database resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, autonomous_database, autonomous_database_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.CreateAutonomousDatabaseRequest): + request = oracledatabase.CreateAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if autonomous_database is not None: + request.autonomous_database = autonomous_database + if autonomous_database_id is not None: + request.autonomous_database_id = autonomous_database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_autonomous_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gco_autonomous_database.AutonomousDatabase, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_autonomous_database(self, + request: Optional[Union[oracledatabase.DeleteAutonomousDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_delete_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.DeleteAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Delete``. + name (str): + Required. The name of the resource in the following + format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.DeleteAutonomousDatabaseRequest): + request = oracledatabase.DeleteAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_autonomous_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def restore_autonomous_database(self, + request: Optional[Union[oracledatabase.RestoreAutonomousDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + restore_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restores a single Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_restore_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.RestoreAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.restore_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.RestoreAutonomousDatabaseRequest, dict]): + The request object. The request for ``AutonomousDatabase.Restore``. + name (str): + Required. 
The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + restore_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The time and date to + restore the database to. + + This corresponds to the ``restore_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.oracledatabase_v1.types.AutonomousDatabase` Details of the Autonomous Database resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, restore_time]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.RestoreAutonomousDatabaseRequest): + request = oracledatabase.RestoreAutonomousDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if restore_time is not None: + request.restore_time = restore_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_autonomous_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + autonomous_database.AutonomousDatabase, + metadata_type=oracledatabase.OperationMetadata, + ) + + # Done; return the response. + return response + + def generate_autonomous_database_wallet(self, + request: Optional[Union[oracledatabase.GenerateAutonomousDatabaseWalletRequest, dict]] = None, + *, + name: Optional[str] = None, + type_: Optional[autonomous_database.GenerateType] = None, + is_regional: Optional[bool] = None, + password: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + r"""Generates a wallet for an Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_generate_autonomous_database_wallet(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GenerateAutonomousDatabaseWalletRequest( + name="name_value", + password="password_value", + ) + + # Make the request + response = client.generate_autonomous_database_wallet(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletRequest, dict]): + The request object. The request for ``AutonomousDatabase.GenerateWallet``. + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + type_ (google.cloud.oracledatabase_v1.types.GenerateType): + Optional. The type of wallet + generation for the Autonomous Database. + The default value is SINGLE. + + This corresponds to the ``type_`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + is_regional (bool): + Optional. True when requesting + regional connection strings in PDB + connect info, applicable to cross-region + Data Guard only. + + This corresponds to the ``is_regional`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + password (str): + Required. The password used to + encrypt the keys inside the wallet. The + password must be a minimum of 8 + characters. + + This corresponds to the ``password`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletResponse: + The response for AutonomousDatabase.GenerateWallet. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, type_, is_regional, password]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.GenerateAutonomousDatabaseWalletRequest): + request = oracledatabase.GenerateAutonomousDatabaseWalletRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if type_ is not None: + request.type_ = type_ + if is_regional is not None: + request.is_regional = is_regional + if password is not None: + request.password = password + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_autonomous_database_wallet] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_db_versions(self, + request: Optional[Union[oracledatabase.ListAutonomousDbVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDbVersionsPager: + r"""Lists all the available Autonomous Database versions + for a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_db_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDbVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_db_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsRequest, dict]): + The request object. The request for ``AutonomousDbVersion.List``. + parent (str): + Required. The parent value for the + Autonomous Database in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDbVersionsPager: + The response for AutonomousDbVersion.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDbVersionsRequest): + request = oracledatabase.ListAutonomousDbVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_autonomous_db_versions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDbVersionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_database_character_sets(self, + request: Optional[Union[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseCharacterSetsPager: + r"""Lists Autonomous Database Character Sets in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_database_character_sets(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseCharacterSetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_character_sets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsRequest, dict]): + The request object. 
The request for ``AutonomousDatabaseCharacterSet.List``. + parent (str): + Required. The parent value for the + Autonomous Database in the following + format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseCharacterSetsPager: + The response for AutonomousDatabaseCharacterSet.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_autonomous_database_character_sets] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDatabaseCharacterSetsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autonomous_database_backups(self, + request: Optional[Union[oracledatabase.ListAutonomousDatabaseBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseBackupsPager: + r"""Lists the long-term and automatic backups of an + Autonomous Database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import oracledatabase_v1 + + def sample_list_autonomous_database_backups(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsRequest, dict]): + The request object. The request for ``AutonomousDatabaseBackup.List``. + parent (str): + Required. The parent value for + ListAutonomousDatabaseBackups in the + following format: + projects/{project}/locations/{location}. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseBackupsPager: + The response for AutonomousDatabaseBackup.List. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, oracledatabase.ListAutonomousDatabaseBackupsRequest): + request = oracledatabase.ListAutonomousDatabaseBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_autonomous_database_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutonomousDatabaseBackupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "OracleDatabaseClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "OracleDatabaseClient", +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py new file mode 100644 index 000000000000..8e1634d718bd --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py @@ -0,0 +1,764 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.oracledatabase_v1.types import autonomous_database +from google.cloud.oracledatabase_v1.types import autonomous_database_character_set +from google.cloud.oracledatabase_v1.types import autonomous_db_backup +from google.cloud.oracledatabase_v1.types import autonomous_db_version +from google.cloud.oracledatabase_v1.types import db_node +from google.cloud.oracledatabase_v1.types import db_server +from google.cloud.oracledatabase_v1.types import db_system_shape +from google.cloud.oracledatabase_v1.types import entitlement +from google.cloud.oracledatabase_v1.types import exadata_infra +from google.cloud.oracledatabase_v1.types import gi_version +from google.cloud.oracledatabase_v1.types import oracledatabase +from google.cloud.oracledatabase_v1.types import vm_cluster + + +class ListCloudExadataInfrastructuresPager: + """A pager for iterating through ``list_cloud_exadata_infrastructures`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cloud_exadata_infrastructures`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListCloudExadataInfrastructures`` requests and continue to iterate + through the ``cloud_exadata_infrastructures`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., oracledatabase.ListCloudExadataInfrastructuresResponse], + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + response: oracledatabase.ListCloudExadataInfrastructuresResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListCloudExadataInfrastructuresRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListCloudExadataInfrastructuresResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[exadata_infra.CloudExadataInfrastructure]: + for page in self.pages: + yield from page.cloud_exadata_infrastructures + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListCloudVmClustersPager: + """A pager for iterating through ``list_cloud_vm_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListCloudVmClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cloud_vm_clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCloudVmClusters`` requests and continue to iterate + through the ``cloud_vm_clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListCloudVmClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., oracledatabase.ListCloudVmClustersResponse], + request: oracledatabase.ListCloudVmClustersRequest, + response: oracledatabase.ListCloudVmClustersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListCloudVmClustersRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListCloudVmClustersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListCloudVmClustersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListCloudVmClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vm_cluster.CloudVmCluster]: + for page in self.pages: + yield from page.cloud_vm_clusters + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntitlementsPager: + """A pager for iterating through ``list_entitlements`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListEntitlementsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entitlements`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntitlements`` requests and continue to iterate + through the ``entitlements`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListEntitlementsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., oracledatabase.ListEntitlementsResponse], + request: oracledatabase.ListEntitlementsRequest, + response: oracledatabase.ListEntitlementsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListEntitlementsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListEntitlementsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListEntitlementsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListEntitlementsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[entitlement.Entitlement]: + for page in self.pages: + yield from page.entitlements + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDbServersPager: + """A pager for iterating through ``list_db_servers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListDbServersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``db_servers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDbServers`` requests and continue to iterate + through the ``db_servers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListDbServersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., oracledatabase.ListDbServersResponse], + request: oracledatabase.ListDbServersRequest, + response: oracledatabase.ListDbServersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListDbServersRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListDbServersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListDbServersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListDbServersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[db_server.DbServer]: + for page in self.pages: + yield from page.db_servers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDbNodesPager: + """A pager for iterating through ``list_db_nodes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListDbNodesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``db_nodes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDbNodes`` requests and continue to iterate + through the ``db_nodes`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.oracledatabase_v1.types.ListDbNodesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., oracledatabase.ListDbNodesResponse], + request: oracledatabase.ListDbNodesRequest, + response: oracledatabase.ListDbNodesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListDbNodesRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListDbNodesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListDbNodesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListDbNodesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[db_node.DbNode]: + for page in self.pages: + yield from page.db_nodes + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListGiVersionsPager: + """A pager for iterating through ``list_gi_versions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListGiVersionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``gi_versions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGiVersions`` requests and continue to iterate + through the ``gi_versions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListGiVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., oracledatabase.ListGiVersionsResponse], + request: oracledatabase.ListGiVersionsRequest, + response: oracledatabase.ListGiVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListGiVersionsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListGiVersionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListGiVersionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListGiVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[gi_version.GiVersion]: + for page in self.pages: + yield from page.gi_versions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDbSystemShapesPager: + """A pager for iterating through ``list_db_system_shapes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListDbSystemShapesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``db_system_shapes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDbSystemShapes`` requests and continue to iterate + through the ``db_system_shapes`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.oracledatabase_v1.types.ListDbSystemShapesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., oracledatabase.ListDbSystemShapesResponse], + request: oracledatabase.ListDbSystemShapesRequest, + response: oracledatabase.ListDbSystemShapesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListDbSystemShapesRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListDbSystemShapesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListDbSystemShapesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListDbSystemShapesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[db_system_shape.DbSystemShape]: + for page in self.pages: + yield from page.db_system_shapes + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAutonomousDatabasesPager: + """A pager for iterating through ``list_autonomous_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_databases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDatabases`` requests and continue to iterate + through the ``autonomous_databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., oracledatabase.ListAutonomousDatabasesResponse], + request: oracledatabase.ListAutonomousDatabasesRequest, + response: oracledatabase.ListAutonomousDatabasesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = oracledatabase.ListAutonomousDatabasesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[autonomous_database.AutonomousDatabase]: + for page in self.pages: + yield from page.autonomous_databases + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAutonomousDbVersionsPager: + """A pager for iterating through ``list_autonomous_db_versions`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_db_versions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDbVersions`` requests and continue to iterate + through the ``autonomous_db_versions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., oracledatabase.ListAutonomousDbVersionsResponse], + request: oracledatabase.ListAutonomousDbVersionsRequest, + response: oracledatabase.ListAutonomousDbVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDbVersionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDbVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[autonomous_db_version.AutonomousDbVersion]: + for page in self.pages: + yield from page.autonomous_db_versions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAutonomousDatabaseCharacterSetsPager: + """A pager for iterating through ``list_autonomous_database_character_sets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_database_character_sets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDatabaseCharacterSets`` requests and continue to iterate + through the ``autonomous_database_character_sets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., oracledatabase.ListAutonomousDatabaseCharacterSetsResponse], + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[autonomous_database_character_set.AutonomousDatabaseCharacterSet]: + for page in self.pages: + yield from page.autonomous_database_character_sets + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAutonomousDatabaseBackupsPager: + """A pager for iterating through ``list_autonomous_database_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``autonomous_database_backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutonomousDatabaseBackups`` requests and continue to iterate + through the ``autonomous_database_backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., oracledatabase.ListAutonomousDatabaseBackupsResponse], + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + response: oracledatabase.ListAutonomousDatabaseBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsRequest): + The initial request object. + response (google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = oracledatabase.ListAutonomousDatabaseBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabaseBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[autonomous_db_backup.AutonomousDatabaseBackup]: + for page in self.pages: + yield from page.autonomous_database_backups + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py new file mode 100644 index 000000000000..a6e6aeba08ab --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import OracleDatabaseTransport +from .rest import OracleDatabaseRestTransport +from .rest import OracleDatabaseRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[OracleDatabaseTransport]] +_transport_registry['rest'] = OracleDatabaseRestTransport + +__all__ = ( + 'OracleDatabaseTransport', + 'OracleDatabaseRestTransport', + 'OracleDatabaseRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py new file mode 100644 index 000000000000..72af8abc4db0 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py @@ -0,0 +1,679 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.oracledatabase_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.location import locations_pb2  # type: ignore
from google.cloud.oracledatabase_v1.types import autonomous_database
from google.cloud.oracledatabase_v1.types import exadata_infra
from google.cloud.oracledatabase_v1.types import oracledatabase
from google.cloud.oracledatabase_v1.types import vm_cluster
from google.longrunning import operations_pb2  # type: ignore

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


def _standard_retry() -> retries.Retry:
    """Build the retry policy shared by every idempotent (List/Get) RPC.

    Transient server-side failures are retried with exponential backoff:
    1.0s initial delay, x1.3 multiplier, capped at 10.0s per attempt, for
    an overall deadline of 60 seconds.  Previously this exact configuration
    was duplicated verbatim for each of the fourteen retried methods.
    """
    return retries.Retry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.InternalServerError,
            core_exceptions.ResourceExhausted,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=60.0,
    )


class OracleDatabaseTransport(abc.ABC):
    """Abstract transport class for OracleDatabase.

    Concrete subclasses (e.g. the REST transport) implement each RPC as a
    callable; this base class handles credential resolution and precomputes
    the retry/timeout-wrapped method table.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'oracledatabase.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'oracledatabase.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The intended audience for the
                credentials; defaults to ``host`` when the credentials
                support a GDC-H audience.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes
        # Subclasses may pre-set this to skip credential resolution entirely.
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None and not self._ignore_credentials:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    @property
    def host(self):
        return self._host

    def _prep_wrapped_messages(self, client_info):
        """Precompute the retry/timeout-wrapped method table.

        Read-only (List/Get) RPCs share the standard retry policy and a 60s
        default timeout; mutating RPCs (create/delete/restore/generate
        wallet) are never retried automatically and have no default timeout.
        Order below mirrors the service definition and fixes dict insertion
        order.
        """
        method_specs = (
            # (bound method, is retried)
            (self.list_cloud_exadata_infrastructures, True),
            (self.get_cloud_exadata_infrastructure, True),
            (self.create_cloud_exadata_infrastructure, False),
            (self.delete_cloud_exadata_infrastructure, False),
            (self.list_cloud_vm_clusters, True),
            (self.get_cloud_vm_cluster, True),
            (self.create_cloud_vm_cluster, False),
            (self.delete_cloud_vm_cluster, False),
            (self.list_entitlements, True),
            (self.list_db_servers, True),
            (self.list_db_nodes, True),
            (self.list_gi_versions, True),
            (self.list_db_system_shapes, True),
            (self.list_autonomous_databases, True),
            (self.get_autonomous_database, True),
            (self.create_autonomous_database, False),
            (self.delete_autonomous_database, False),
            (self.restore_autonomous_database, False),
            (self.generate_autonomous_database_wallet, False),
            (self.list_autonomous_db_versions, True),
            (self.list_autonomous_database_character_sets, True),
            (self.list_autonomous_database_backups, True),
        )
        # wrap_method(default_retry=None, default_timeout=None) is its
        # documented default, so passing None explicitly for unretried
        # methods is identical to omitting the arguments.
        self._wrapped_methods = {
            method: gapic_v1.method.wrap_method(
                method,
                default_retry=_standard_retry() if is_retried else None,
                default_timeout=60.0 if is_retried else None,
                client_info=client_info,
            )
            for method, is_retried in method_specs
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    @property
    def list_cloud_exadata_infrastructures(self) -> Callable[
            [oracledatabase.ListCloudExadataInfrastructuresRequest],
            Union[
                oracledatabase.ListCloudExadataInfrastructuresResponse,
                Awaitable[oracledatabase.ListCloudExadataInfrastructuresResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_cloud_exadata_infrastructure(self) -> Callable[
            [oracledatabase.GetCloudExadataInfrastructureRequest],
            Union[
                exadata_infra.CloudExadataInfrastructure,
                Awaitable[exadata_infra.CloudExadataInfrastructure]
            ]]:
        raise NotImplementedError()

    @property
    def create_cloud_exadata_infrastructure(self) -> Callable[
            [oracledatabase.CreateCloudExadataInfrastructureRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def delete_cloud_exadata_infrastructure(self) -> Callable[
            [oracledatabase.DeleteCloudExadataInfrastructureRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list_cloud_vm_clusters(self) -> Callable[
            [oracledatabase.ListCloudVmClustersRequest],
            Union[
                oracledatabase.ListCloudVmClustersResponse,
                Awaitable[oracledatabase.ListCloudVmClustersResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_cloud_vm_cluster(self) -> Callable[
            [oracledatabase.GetCloudVmClusterRequest],
            Union[
                vm_cluster.CloudVmCluster,
                Awaitable[vm_cluster.CloudVmCluster]
            ]]:
        raise NotImplementedError()

    @property
    def create_cloud_vm_cluster(self) -> Callable[
            [oracledatabase.CreateCloudVmClusterRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def delete_cloud_vm_cluster(self) -> Callable[
            [oracledatabase.DeleteCloudVmClusterRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list_entitlements(self) -> Callable[
            [oracledatabase.ListEntitlementsRequest],
            Union[
                oracledatabase.ListEntitlementsResponse,
                Awaitable[oracledatabase.ListEntitlementsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_db_servers(self) -> Callable[
            [oracledatabase.ListDbServersRequest],
            Union[
                oracledatabase.ListDbServersResponse,
                Awaitable[oracledatabase.ListDbServersResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_db_nodes(self) -> Callable[
            [oracledatabase.ListDbNodesRequest],
            Union[
                oracledatabase.ListDbNodesResponse,
                Awaitable[oracledatabase.ListDbNodesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_gi_versions(self) -> Callable[
            [oracledatabase.ListGiVersionsRequest],
            Union[
                oracledatabase.ListGiVersionsResponse,
                Awaitable[oracledatabase.ListGiVersionsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_db_system_shapes(self) -> Callable[
            [oracledatabase.ListDbSystemShapesRequest],
            Union[
                oracledatabase.ListDbSystemShapesResponse,
                Awaitable[oracledatabase.ListDbSystemShapesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_autonomous_databases(self) -> Callable[
            [oracledatabase.ListAutonomousDatabasesRequest],
            Union[
                oracledatabase.ListAutonomousDatabasesResponse,
                Awaitable[oracledatabase.ListAutonomousDatabasesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_autonomous_database(self) -> Callable[
            [oracledatabase.GetAutonomousDatabaseRequest],
            Union[
                autonomous_database.AutonomousDatabase,
                Awaitable[autonomous_database.AutonomousDatabase]
            ]]:
        raise NotImplementedError()

    @property
    def create_autonomous_database(self) -> Callable[
            [oracledatabase.CreateAutonomousDatabaseRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def delete_autonomous_database(self) -> Callable[
            [oracledatabase.DeleteAutonomousDatabaseRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def restore_autonomous_database(self) -> Callable[
            [oracledatabase.RestoreAutonomousDatabaseRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def generate_autonomous_database_wallet(self) -> Callable[
            [oracledatabase.GenerateAutonomousDatabaseWalletRequest],
            Union[
                oracledatabase.GenerateAutonomousDatabaseWalletResponse,
                Awaitable[oracledatabase.GenerateAutonomousDatabaseWalletResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_autonomous_db_versions(self) -> Callable[
            [oracledatabase.ListAutonomousDbVersionsRequest],
            Union[
                oracledatabase.ListAutonomousDbVersionsResponse,
                Awaitable[oracledatabase.ListAutonomousDbVersionsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_autonomous_database_character_sets(self) -> Callable[
            [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest],
            Union[
                oracledatabase.ListAutonomousDatabaseCharacterSetsResponse,
                Awaitable[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_autonomous_database_backups(self) -> Callable[
            [oracledatabase.ListAutonomousDatabaseBackupsRequest],
            Union[
                oracledatabase.ListAutonomousDatabaseBackupsResponse,
                Awaitable[oracledatabase.ListAutonomousDatabaseBackupsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest],
        Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]],
    ]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def cancel_operation(
        self,
    ) -> Callable[
        [operations_pb2.CancelOperationRequest],
        None,
    ]:
        raise NotImplementedError()

    @property
    def delete_operation(
        self,
    ) -> Callable[
        [operations_pb2.DeleteOperationRequest],
        None,
    ]:
        raise NotImplementedError()

    @property
    def get_location(self,
    ) -> Callable[
        [locations_pb2.GetLocationRequest],
        Union[locations_pb2.Location, Awaitable[locations_pb2.Location]],
    ]:
        raise NotImplementedError()

    @property
    def list_locations(self,
    ) -> Callable[
        [locations_pb2.ListLocationsRequest],
        Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()


__all__ = (
    'OracleDatabaseTransport',
)
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.cloud.oracledatabase_v1.types import autonomous_database +from google.cloud.oracledatabase_v1.types import exadata_infra +from google.cloud.oracledatabase_v1.types import oracledatabase +from google.cloud.oracledatabase_v1.types import vm_cluster +from google.longrunning import operations_pb2 # type: ignore + +from .base import OracleDatabaseTransport, DEFAULT_CLIENT_INFO as 
BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class OracleDatabaseRestInterceptor: + """Interceptor for OracleDatabase. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the OracleDatabaseRestTransport. + + .. code-block:: python + class MyCustomOracleDatabaseInterceptor(OracleDatabaseRestInterceptor): + def pre_create_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_cloud_exadata_infrastructure(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cloud_exadata_infrastructure(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_cloud_vm_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cloud_vm_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cloud_exadata_infrastructure(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_delete_cloud_exadata_infrastructure(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cloud_vm_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cloud_vm_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_autonomous_database_wallet(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_autonomous_database_wallet(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cloud_exadata_infrastructure(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cloud_exadata_infrastructure(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cloud_vm_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cloud_vm_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autonomous_database_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_database_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autonomous_database_character_sets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_database_character_sets(self, response): + logging.log(f"Received response: {response}") + return response + + 
def pre_list_autonomous_databases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_databases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autonomous_db_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autonomous_db_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_cloud_exadata_infrastructures(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_cloud_exadata_infrastructures(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_cloud_vm_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_cloud_vm_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_db_nodes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_db_nodes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_db_servers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_db_servers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_db_system_shapes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_db_system_shapes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entitlements(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entitlements(self, response): + logging.log(f"Received response: {response}") + 
return response + + def pre_list_gi_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_gi_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_autonomous_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_autonomous_database(self, response): + logging.log(f"Received response: {response}") + return response + + transport = OracleDatabaseRestTransport(interceptor=MyCustomOracleDatabaseInterceptor()) + client = OracleDatabaseClient(transport=transport) + + + """ + def pre_create_autonomous_database(self, request: oracledatabase.CreateAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.CreateAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_create_autonomous_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_create_cloud_exadata_infrastructure(self, request: oracledatabase.CreateCloudExadataInfrastructureRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.CreateCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cloud_exadata_infrastructure + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. 
+ """ + return request, metadata + + def post_create_cloud_exadata_infrastructure(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cloud_exadata_infrastructure + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_create_cloud_vm_cluster(self, request: oracledatabase.CreateCloudVmClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.CreateCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cloud_vm_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_create_cloud_vm_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cloud_vm_cluster + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_delete_autonomous_database(self, request: oracledatabase.DeleteAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.DeleteAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_autonomous_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + def pre_delete_cloud_exadata_infrastructure(self, request: oracledatabase.DeleteCloudExadataInfrastructureRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.DeleteCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cloud_exadata_infrastructure + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_cloud_exadata_infrastructure(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cloud_exadata_infrastructure + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_delete_cloud_vm_cluster(self, request: oracledatabase.DeleteCloudVmClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.DeleteCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cloud_vm_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_cloud_vm_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cloud_vm_cluster + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + def pre_generate_autonomous_database_wallet(self, request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GenerateAutonomousDatabaseWalletRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_autonomous_database_wallet + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_generate_autonomous_database_wallet(self, response: oracledatabase.GenerateAutonomousDatabaseWalletResponse) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + """Post-rpc interceptor for generate_autonomous_database_wallet + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_get_autonomous_database(self, request: oracledatabase.GetAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GetAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_autonomous_database(self, response: autonomous_database.AutonomousDatabase) -> autonomous_database.AutonomousDatabase: + """Post-rpc interceptor for get_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + def pre_get_cloud_exadata_infrastructure(self, request: oracledatabase.GetCloudExadataInfrastructureRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GetCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cloud_exadata_infrastructure + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_cloud_exadata_infrastructure(self, response: exadata_infra.CloudExadataInfrastructure) -> exadata_infra.CloudExadataInfrastructure: + """Post-rpc interceptor for get_cloud_exadata_infrastructure + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_get_cloud_vm_cluster(self, request: oracledatabase.GetCloudVmClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GetCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cloud_vm_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_cloud_vm_cluster(self, response: vm_cluster.CloudVmCluster) -> vm_cluster.CloudVmCluster: + """Post-rpc interceptor for get_cloud_vm_cluster + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + def pre_list_autonomous_database_backups(self, request: oracledatabase.ListAutonomousDatabaseBackupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDatabaseBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_autonomous_database_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_database_backups(self, response: oracledatabase.ListAutonomousDatabaseBackupsResponse) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + """Post-rpc interceptor for list_autonomous_database_backups + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_list_autonomous_database_character_sets(self, request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_autonomous_database_character_sets + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_database_character_sets(self, response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + """Post-rpc interceptor for list_autonomous_database_character_sets + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + def pre_list_autonomous_databases(self, request: oracledatabase.ListAutonomousDatabasesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDatabasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_autonomous_databases + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_databases(self, response: oracledatabase.ListAutonomousDatabasesResponse) -> oracledatabase.ListAutonomousDatabasesResponse: + """Post-rpc interceptor for list_autonomous_databases + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_list_autonomous_db_versions(self, request: oracledatabase.ListAutonomousDbVersionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDbVersionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_autonomous_db_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_autonomous_db_versions(self, response: oracledatabase.ListAutonomousDbVersionsResponse) -> oracledatabase.ListAutonomousDbVersionsResponse: + """Post-rpc interceptor for list_autonomous_db_versions + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + def pre_list_cloud_exadata_infrastructures(self, request: oracledatabase.ListCloudExadataInfrastructuresRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListCloudExadataInfrastructuresRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_cloud_exadata_infrastructures + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_cloud_exadata_infrastructures(self, response: oracledatabase.ListCloudExadataInfrastructuresResponse) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + """Post-rpc interceptor for list_cloud_exadata_infrastructures + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_list_cloud_vm_clusters(self, request: oracledatabase.ListCloudVmClustersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListCloudVmClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_cloud_vm_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_cloud_vm_clusters(self, response: oracledatabase.ListCloudVmClustersResponse) -> oracledatabase.ListCloudVmClustersResponse: + """Post-rpc interceptor for list_cloud_vm_clusters + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + def pre_list_db_nodes(self, request: oracledatabase.ListDbNodesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListDbNodesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_db_nodes + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_db_nodes(self, response: oracledatabase.ListDbNodesResponse) -> oracledatabase.ListDbNodesResponse: + """Post-rpc interceptor for list_db_nodes + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_list_db_servers(self, request: oracledatabase.ListDbServersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListDbServersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_db_servers + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_db_servers(self, response: oracledatabase.ListDbServersResponse) -> oracledatabase.ListDbServersResponse: + """Post-rpc interceptor for list_db_servers + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_list_db_system_shapes(self, request: oracledatabase.ListDbSystemShapesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListDbSystemShapesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_db_system_shapes + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. 
+ """ + return request, metadata + + def post_list_db_system_shapes(self, response: oracledatabase.ListDbSystemShapesResponse) -> oracledatabase.ListDbSystemShapesResponse: + """Post-rpc interceptor for list_db_system_shapes + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_list_entitlements(self, request: oracledatabase.ListEntitlementsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListEntitlementsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_entitlements + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_entitlements(self, response: oracledatabase.ListEntitlementsResponse) -> oracledatabase.ListEntitlementsResponse: + """Post-rpc interceptor for list_entitlements + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_list_gi_versions(self, request: oracledatabase.ListGiVersionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListGiVersionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_gi_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_gi_versions(self, response: oracledatabase.ListGiVersionsResponse) -> oracledatabase.ListGiVersionsResponse: + """Post-rpc interceptor for list_gi_versions + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + def pre_restore_autonomous_database(self, request: oracledatabase.RestoreAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.RestoreAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_autonomous_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_restore_autonomous_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_autonomous_database + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. 
+ """ + return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OracleDatabase server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the OracleDatabase server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class OracleDatabaseRestStub: + _session: AuthorizedSession + _host: str + _interceptor: OracleDatabaseRestInterceptor + + +class OracleDatabaseRestTransport(OracleDatabaseTransport): + """REST backend transport for OracleDatabase. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'oracledatabase.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[OracleDatabaseRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'oracledatabase.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+                always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or OracleDatabaseRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("CreateAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "autonomousDatabaseId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.CreateAutonomousDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.CreateAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Create``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabases', + 'body': 'autonomous_database', + }, + ] + request, metadata = self._interceptor.pre_create_autonomous_database(request, metadata) + pb_request = oracledatabase.CreateAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_autonomous_database(resp) + return resp + + class _CreateCloudExadataInfrastructure(OracleDatabaseRestStub): + def __hash__(self): + return hash("CreateCloudExadataInfrastructure") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "cloudExadataInfrastructureId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.CreateCloudExadataInfrastructureRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.CreateCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Create``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures', + 'body': 'cloud_exadata_infrastructure', + }, + ] + request, metadata = self._interceptor.pre_create_cloud_exadata_infrastructure(request, metadata) + pb_request = oracledatabase.CreateCloudExadataInfrastructureRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cloud_exadata_infrastructure(resp) + return resp + + class _CreateCloudVmCluster(OracleDatabaseRestStub): + def __hash__(self): + return hash("CreateCloudVmCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "cloudVmClusterId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.CreateCloudVmClusterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create cloud vm cluster method over HTTP. + + Args: + request (~.oracledatabase.CreateCloudVmClusterRequest): + The request object. The request for ``CloudVmCluster.Create``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/cloudVmClusters', + 'body': 'cloud_vm_cluster', + }, + ] + request, metadata = self._interceptor.pre_create_cloud_vm_cluster(request, metadata) + pb_request = oracledatabase.CreateCloudVmClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cloud_vm_cluster(resp) + return resp + + class _DeleteAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("DeleteAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.DeleteAutonomousDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.DeleteAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_autonomous_database(request, metadata) + pb_request = oracledatabase.DeleteAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_autonomous_database(resp) + return resp + + class _DeleteCloudExadataInfrastructure(OracleDatabaseRestStub): + def __hash__(self): + return hash("DeleteCloudExadataInfrastructure") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.DeleteCloudExadataInfrastructureRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.DeleteCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_cloud_exadata_infrastructure(request, metadata) + pb_request = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cloud_exadata_infrastructure(resp) + return resp + + class _DeleteCloudVmCluster(OracleDatabaseRestStub): + def __hash__(self): + return hash("DeleteCloudVmCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.DeleteCloudVmClusterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete cloud vm cluster method over HTTP. + + Args: + request (~.oracledatabase.DeleteCloudVmClusterRequest): + The request object. The request for ``CloudVmCluster.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/cloudVmClusters/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_cloud_vm_cluster(request, metadata) + pb_request = oracledatabase.DeleteCloudVmClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cloud_vm_cluster(resp) + return resp + + class _GenerateAutonomousDatabaseWallet(OracleDatabaseRestStub): + def __hash__(self): + return hash("GenerateAutonomousDatabaseWallet") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + r"""Call the generate autonomous + database wallet method over HTTP. + + Args: + request (~.oracledatabase.GenerateAutonomousDatabaseWalletRequest): + The request object. The request for ``AutonomousDatabase.GenerateWallet``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.GenerateAutonomousDatabaseWalletResponse: + The response for ``AutonomousDatabase.GenerateWallet``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_generate_autonomous_database_wallet(request, metadata) + pb_request = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + pb_resp = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_autonomous_database_wallet(resp) + return resp + + class _GetAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("GetAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.GetAutonomousDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> autonomous_database.AutonomousDatabase: + r"""Call the get autonomous database method over HTTP. + + Args: + request (~.oracledatabase.GetAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Get``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.autonomous_database.AutonomousDatabase: + Details of the Autonomous Database + resource. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}', + }, + ] + request, metadata = self._interceptor.pre_get_autonomous_database(request, metadata) + pb_request = oracledatabase.GetAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autonomous_database.AutonomousDatabase() + pb_resp = autonomous_database.AutonomousDatabase.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_autonomous_database(resp) + return resp + + class _GetCloudExadataInfrastructure(OracleDatabaseRestStub): + def __hash__(self): + return hash("GetCloudExadataInfrastructure") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.GetCloudExadataInfrastructureRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> exadata_infra.CloudExadataInfrastructure: + r"""Call the get cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.GetCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Get``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.exadata_infra.CloudExadataInfrastructure: + Represents CloudExadataInfrastructure + resource. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}', + }, + ] + request, metadata = self._interceptor.pre_get_cloud_exadata_infrastructure(request, metadata) + pb_request = oracledatabase.GetCloudExadataInfrastructureRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = exadata_infra.CloudExadataInfrastructure() + pb_resp = exadata_infra.CloudExadataInfrastructure.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cloud_exadata_infrastructure(resp) + return resp + + class _GetCloudVmCluster(OracleDatabaseRestStub): + def __hash__(self): + return hash("GetCloudVmCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.GetCloudVmClusterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> vm_cluster.CloudVmCluster: + r"""Call the get cloud vm cluster method over HTTP. + + Args: + request (~.oracledatabase.GetCloudVmClusterRequest): + The request object. The request for ``CloudVmCluster.Get``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vm_cluster.CloudVmCluster: + Details of the Cloud VM Cluster + resource. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/cloudVmClusters/*}', + }, + ] + request, metadata = self._interceptor.pre_get_cloud_vm_cluster(request, metadata) + pb_request = oracledatabase.GetCloudVmClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vm_cluster.CloudVmCluster() + pb_resp = vm_cluster.CloudVmCluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cloud_vm_cluster(resp) + return resp + + class _ListAutonomousDatabaseBackups(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDatabaseBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + r"""Call the list autonomous database + backups method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseBackupsRequest): + The request object. The request for ``AutonomousDatabaseBackup.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseBackupsResponse: + The response for ``AutonomousDatabaseBackup.List``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups', + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_database_backups(request, metadata) + pb_request = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDatabaseBackupsResponse() + pb_resp = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_database_backups(resp) + return resp + + class _ListAutonomousDatabaseCharacterSets(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDatabaseCharacterSets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + r"""Call the list autonomous database + character sets method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + The request object. The request for ``AutonomousDatabaseCharacterSet.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + The response for + ``AutonomousDatabaseCharacterSet.List``. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets', + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_database_character_sets(request, metadata) + pb_request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + pb_resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_database_character_sets(resp) + return resp + + class _ListAutonomousDatabases(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDatabases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListAutonomousDatabasesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListAutonomousDatabasesResponse: + r"""Call the list autonomous databases method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabasesRequest): + The request object. The request for ``AutonomousDatabase.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabasesResponse: + The response for ``AutonomousDatabase.List``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabases', + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_databases(request, metadata) + pb_request = oracledatabase.ListAutonomousDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDatabasesResponse() + pb_resp = oracledatabase.ListAutonomousDatabasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_databases(resp) + return resp + + class _ListAutonomousDbVersions(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListAutonomousDbVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListAutonomousDbVersionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListAutonomousDbVersionsResponse: + r"""Call the list autonomous db + versions method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDbVersionsRequest): + The request object. The request for ``AutonomousDbVersion.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDbVersionsResponse: + The response for ``AutonomousDbVersion.List``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDbVersions', + }, + ] + request, metadata = self._interceptor.pre_list_autonomous_db_versions(request, metadata) + pb_request = oracledatabase.ListAutonomousDbVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListAutonomousDbVersionsResponse() + pb_resp = oracledatabase.ListAutonomousDbVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autonomous_db_versions(resp) + return resp + + class _ListCloudExadataInfrastructures(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListCloudExadataInfrastructures") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListCloudExadataInfrastructuresRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + r"""Call the list cloud exadata + infrastructures method over HTTP. + + Args: + request (~.oracledatabase.ListCloudExadataInfrastructuresRequest): + The request object. The request for ``CloudExadataInfrastructures.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListCloudExadataInfrastructuresResponse: + The response for ``CloudExadataInfrastructures.list``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures', + }, + ] + request, metadata = self._interceptor.pre_list_cloud_exadata_infrastructures(request, metadata) + pb_request = oracledatabase.ListCloudExadataInfrastructuresRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListCloudExadataInfrastructuresResponse() + pb_resp = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_cloud_exadata_infrastructures(resp) + return resp + + class _ListCloudVmClusters(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListCloudVmClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListCloudVmClustersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListCloudVmClustersResponse: + r"""Call the list cloud vm clusters method over HTTP. + + Args: + request (~.oracledatabase.ListCloudVmClustersRequest): + The request object. The request for ``CloudVmCluster.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListCloudVmClustersResponse: + The response for ``CloudVmCluster.List``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/cloudVmClusters', + }, + ] + request, metadata = self._interceptor.pre_list_cloud_vm_clusters(request, metadata) + pb_request = oracledatabase.ListCloudVmClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListCloudVmClustersResponse() + pb_resp = oracledatabase.ListCloudVmClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_cloud_vm_clusters(resp) + return resp + + class _ListDbNodes(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListDbNodes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListDbNodesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListDbNodesResponse: + r"""Call the list db nodes method over HTTP. + + Args: + request (~.oracledatabase.ListDbNodesRequest): + The request object. The request for ``DbNode.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListDbNodesResponse: + The response for ``DbNode.List``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes', + }, + ] + request, metadata = self._interceptor.pre_list_db_nodes(request, metadata) + pb_request = oracledatabase.ListDbNodesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListDbNodesResponse() + pb_resp = oracledatabase.ListDbNodesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_db_nodes(resp) + return resp + + class _ListDbServers(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListDbServers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListDbServersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListDbServersResponse: + r"""Call the list db servers method over HTTP. + + Args: + request (~.oracledatabase.ListDbServersRequest): + The request object. The request for ``DbServer.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListDbServersResponse: + The response for ``DbServer.List``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers', + }, + ] + request, metadata = self._interceptor.pre_list_db_servers(request, metadata) + pb_request = oracledatabase.ListDbServersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListDbServersResponse() + pb_resp = oracledatabase.ListDbServersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_db_servers(resp) + return resp + + class _ListDbSystemShapes(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListDbSystemShapes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListDbSystemShapesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListDbSystemShapesResponse: + r"""Call the list db system shapes method over HTTP. + + Args: + request (~.oracledatabase.ListDbSystemShapesRequest): + The request object. The request for ``DbSystemShape.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListDbSystemShapesResponse: + The response for ``DbSystemShape.List``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/dbSystemShapes', + }, + ] + request, metadata = self._interceptor.pre_list_db_system_shapes(request, metadata) + pb_request = oracledatabase.ListDbSystemShapesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListDbSystemShapesResponse() + pb_resp = oracledatabase.ListDbSystemShapesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_db_system_shapes(resp) + return resp + + class _ListEntitlements(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListEntitlements") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListEntitlementsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListEntitlementsResponse: + r"""Call the list entitlements method over HTTP. + + Args: + request (~.oracledatabase.ListEntitlementsRequest): + The request object. The request for ``Entitlement.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListEntitlementsResponse: + The response for ``Entitlement.List``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/entitlements', + }, + ] + request, metadata = self._interceptor.pre_list_entitlements(request, metadata) + pb_request = oracledatabase.ListEntitlementsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListEntitlementsResponse() + pb_resp = oracledatabase.ListEntitlementsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_entitlements(resp) + return resp + + class _ListGiVersions(OracleDatabaseRestStub): + def __hash__(self): + return hash("ListGiVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.ListGiVersionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> oracledatabase.ListGiVersionsResponse: + r"""Call the list gi versions method over HTTP. + + Args: + request (~.oracledatabase.ListGiVersionsRequest): + The request object. The request for ``GiVersion.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListGiVersionsResponse: + The response for ``GiVersion.List``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/giVersions', + }, + ] + request, metadata = self._interceptor.pre_list_gi_versions(request, metadata) + pb_request = oracledatabase.ListGiVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = oracledatabase.ListGiVersionsResponse() + pb_resp = oracledatabase.ListGiVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_gi_versions(resp) + return resp + + class _RestoreAutonomousDatabase(OracleDatabaseRestStub): + def __hash__(self): + return hash("RestoreAutonomousDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: oracledatabase.RestoreAutonomousDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the restore autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.RestoreAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Restore``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_restore_autonomous_database(request, metadata) + pb_request = oracledatabase.RestoreAutonomousDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_autonomous_database(resp) + return resp + + @property + def create_autonomous_database(self) -> Callable[ + [oracledatabase.CreateAutonomousDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cloud_exadata_infrastructure(self) -> Callable[ + [oracledatabase.CreateCloudExadataInfrastructureRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_cloud_vm_cluster(self) -> Callable[ + [oracledatabase.CreateCloudVmClusterRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_autonomous_database(self) -> Callable[ + [oracledatabase.DeleteAutonomousDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cloud_exadata_infrastructure(self) -> Callable[ + [oracledatabase.DeleteCloudExadataInfrastructureRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cloud_vm_cluster(self) -> Callable[ + [oracledatabase.DeleteCloudVmClusterRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_autonomous_database_wallet(self) -> Callable[ + [oracledatabase.GenerateAutonomousDatabaseWalletRequest], + oracledatabase.GenerateAutonomousDatabaseWalletResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateAutonomousDatabaseWallet(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_autonomous_database(self) -> Callable[ + [oracledatabase.GetAutonomousDatabaseRequest], + autonomous_database.AutonomousDatabase]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cloud_exadata_infrastructure(self) -> Callable[ + [oracledatabase.GetCloudExadataInfrastructureRequest], + exadata_infra.CloudExadataInfrastructure]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cloud_vm_cluster(self) -> Callable[ + [oracledatabase.GetCloudVmClusterRequest], + vm_cluster.CloudVmCluster]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_database_backups(self) -> Callable[ + [oracledatabase.ListAutonomousDatabaseBackupsRequest], + oracledatabase.ListAutonomousDatabaseBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAutonomousDatabaseBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_database_character_sets(self) -> Callable[ + [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAutonomousDatabaseCharacterSets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_databases(self) -> Callable[ + [oracledatabase.ListAutonomousDatabasesRequest], + oracledatabase.ListAutonomousDatabasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAutonomousDatabases(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autonomous_db_versions(self) -> Callable[ + [oracledatabase.ListAutonomousDbVersionsRequest], + oracledatabase.ListAutonomousDbVersionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAutonomousDbVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_cloud_exadata_infrastructures(self) -> Callable[ + [oracledatabase.ListCloudExadataInfrastructuresRequest], + oracledatabase.ListCloudExadataInfrastructuresResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCloudExadataInfrastructures(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_cloud_vm_clusters(self) -> Callable[ + [oracledatabase.ListCloudVmClustersRequest], + oracledatabase.ListCloudVmClustersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCloudVmClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_db_nodes(self) -> Callable[ + [oracledatabase.ListDbNodesRequest], + oracledatabase.ListDbNodesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDbNodes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_db_servers(self) -> Callable[ + [oracledatabase.ListDbServersRequest], + oracledatabase.ListDbServersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDbServers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_db_system_shapes(self) -> Callable[ + [oracledatabase.ListDbSystemShapesRequest], + oracledatabase.ListDbSystemShapesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDbSystemShapes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entitlements(self) -> Callable[ + [oracledatabase.ListEntitlementsRequest], + oracledatabase.ListEntitlementsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntitlements(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_gi_versions(self) -> Callable[ + [oracledatabase.ListGiVersionsRequest], + oracledatabase.ListGiVersionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGiVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_autonomous_database(self) -> Callable[ + [oracledatabase.RestoreAutonomousDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(OracleDatabaseRestStub): + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(OracleDatabaseRestStub): + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(OracleDatabaseRestStub): + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(OracleDatabaseRestStub): + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. 
+ + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(OracleDatabaseRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(OracleDatabaseRestStub): + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'OracleDatabaseRestTransport', +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py new file mode 100644 index 000000000000..186fe76fa7d5 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + ScheduledOperationDetails, + DBWorkload, + GenerateType, + OperationsInsightsState, + State, +) +from .autonomous_database_character_set import ( + AutonomousDatabaseCharacterSet, +) +from .autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from .autonomous_db_version import ( + AutonomousDbVersion, +) +from .common import ( + CustomerContact, +) +from .db_node import ( + DbNode, + DbNodeProperties, +) +from .db_server import ( + DbServer, + DbServerProperties, +) +from .db_system_shape import ( + DbSystemShape, +) +from .entitlement import ( + CloudAccountDetails, + Entitlement, +) +from .exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from .gi_version import ( + GiVersion, +) +from .location_metadata import ( + LocationMetadata, +) +from .oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + 
GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from .vm_cluster import ( + CloudVmCluster, + CloudVmClusterProperties, + DataCollectionOptions, +) + +__all__ = ( + 'AllConnectionStrings', + 'AutonomousDatabase', + 'AutonomousDatabaseApex', + 'AutonomousDatabaseConnectionStrings', + 'AutonomousDatabaseConnectionUrls', + 'AutonomousDatabaseProperties', + 'AutonomousDatabaseStandbySummary', + 'DatabaseConnectionStringProfile', + 'ScheduledOperationDetails', + 'DBWorkload', + 'GenerateType', + 'OperationsInsightsState', + 'State', + 'AutonomousDatabaseCharacterSet', + 'AutonomousDatabaseBackup', + 'AutonomousDatabaseBackupProperties', + 'AutonomousDbVersion', + 'CustomerContact', + 'DbNode', + 'DbNodeProperties', + 'DbServer', + 'DbServerProperties', + 'DbSystemShape', + 'CloudAccountDetails', + 'Entitlement', + 'CloudExadataInfrastructure', + 'CloudExadataInfrastructureProperties', + 'MaintenanceWindow', + 'GiVersion', + 'LocationMetadata', + 'CreateAutonomousDatabaseRequest', + 'CreateCloudExadataInfrastructureRequest', + 'CreateCloudVmClusterRequest', + 'DeleteAutonomousDatabaseRequest', + 'DeleteCloudExadataInfrastructureRequest', + 'DeleteCloudVmClusterRequest', + 'GenerateAutonomousDatabaseWalletRequest', + 
'GenerateAutonomousDatabaseWalletResponse', + 'GetAutonomousDatabaseRequest', + 'GetCloudExadataInfrastructureRequest', + 'GetCloudVmClusterRequest', + 'ListAutonomousDatabaseBackupsRequest', + 'ListAutonomousDatabaseBackupsResponse', + 'ListAutonomousDatabaseCharacterSetsRequest', + 'ListAutonomousDatabaseCharacterSetsResponse', + 'ListAutonomousDatabasesRequest', + 'ListAutonomousDatabasesResponse', + 'ListAutonomousDbVersionsRequest', + 'ListAutonomousDbVersionsResponse', + 'ListCloudExadataInfrastructuresRequest', + 'ListCloudExadataInfrastructuresResponse', + 'ListCloudVmClustersRequest', + 'ListCloudVmClustersResponse', + 'ListDbNodesRequest', + 'ListDbNodesResponse', + 'ListDbServersRequest', + 'ListDbServersResponse', + 'ListDbSystemShapesRequest', + 'ListDbSystemShapesResponse', + 'ListEntitlementsRequest', + 'ListEntitlementsResponse', + 'ListGiVersionsRequest', + 'ListGiVersionsResponse', + 'OperationMetadata', + 'RestoreAutonomousDatabaseRequest', + 'CloudVmCluster', + 'CloudVmClusterProperties', + 'DataCollectionOptions', +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py new file mode 100644 index 000000000000..f1e5c317c8ac --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py @@ -0,0 +1,1418 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import common +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'GenerateType', + 'State', + 'OperationsInsightsState', + 'DBWorkload', + 'AutonomousDatabase', + 'AutonomousDatabaseProperties', + 'AutonomousDatabaseApex', + 'AutonomousDatabaseConnectionStrings', + 'DatabaseConnectionStringProfile', + 'AllConnectionStrings', + 'AutonomousDatabaseConnectionUrls', + 'AutonomousDatabaseStandbySummary', + 'ScheduledOperationDetails', + }, +) + + +class GenerateType(proto.Enum): + r"""The type of wallet generation. + + Values: + GENERATE_TYPE_UNSPECIFIED (0): + Default unspecified value. + ALL (1): + Used to generate wallet for all databases in + the region. + SINGLE (2): + Used to generate wallet for a single + database. + """ + GENERATE_TYPE_UNSPECIFIED = 0 + ALL = 1 + SINGLE = 2 + + +class State(proto.Enum): + r"""The various lifecycle states of the Autonomous Database. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + Indicates that the Autonomous Database is in + provisioning state. + AVAILABLE (2): + Indicates that the Autonomous Database is in + available state. + STOPPING (3): + Indicates that the Autonomous Database is in + stopping state. + STOPPED (4): + Indicates that the Autonomous Database is in + stopped state. + STARTING (5): + Indicates that the Autonomous Database is in + starting state. + TERMINATING (6): + Indicates that the Autonomous Database is in + terminating state. 
+ TERMINATED (7): + Indicates that the Autonomous Database is in + terminated state. + UNAVAILABLE (8): + Indicates that the Autonomous Database is in + unavailable state. + RESTORE_IN_PROGRESS (9): + Indicates that the Autonomous Database + restore is in progress. + RESTORE_FAILED (10): + Indicates that the Autonomous Database failed + to restore. + BACKUP_IN_PROGRESS (11): + Indicates that the Autonomous Database backup + is in progress. + SCALE_IN_PROGRESS (12): + Indicates that the Autonomous Database scale + is in progress. + AVAILABLE_NEEDS_ATTENTION (13): + Indicates that the Autonomous Database is + available but needs attention state. + UPDATING (14): + Indicates that the Autonomous Database is in + updating state. + MAINTENANCE_IN_PROGRESS (15): + Indicates that the Autonomous Database's + maintenance is in progress state. + RESTARTING (16): + Indicates that the Autonomous Database is in + restarting state. + RECREATING (17): + Indicates that the Autonomous Database is in + recreating state. + ROLE_CHANGE_IN_PROGRESS (18): + Indicates that the Autonomous Database's role + change is in progress state. + UPGRADING (19): + Indicates that the Autonomous Database is in + upgrading state. + INACCESSIBLE (20): + Indicates that the Autonomous Database is in + inaccessible state. + STANDBY (21): + Indicates that the Autonomous Database is in + standby state. + """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + STOPPING = 3 + STOPPED = 4 + STARTING = 5 + TERMINATING = 6 + TERMINATED = 7 + UNAVAILABLE = 8 + RESTORE_IN_PROGRESS = 9 + RESTORE_FAILED = 10 + BACKUP_IN_PROGRESS = 11 + SCALE_IN_PROGRESS = 12 + AVAILABLE_NEEDS_ATTENTION = 13 + UPDATING = 14 + MAINTENANCE_IN_PROGRESS = 15 + RESTARTING = 16 + RECREATING = 17 + ROLE_CHANGE_IN_PROGRESS = 18 + UPGRADING = 19 + INACCESSIBLE = 20 + STANDBY = 21 + + +class OperationsInsightsState(proto.Enum): + r"""The state of the Operations Insights for this Autonomous + Database. 
+ + Values: + OPERATIONS_INSIGHTS_STATE_UNSPECIFIED (0): + Default unspecified value. + ENABLING (1): + Enabling status for operation insights. + ENABLED (2): + Enabled status for operation insights. + DISABLING (3): + Disabling status for operation insights. + NOT_ENABLED (4): + Not Enabled status for operation insights. + FAILED_ENABLING (5): + Failed enabling status for operation + insights. + FAILED_DISABLING (6): + Failed disabling status for operation + insights. + """ + OPERATIONS_INSIGHTS_STATE_UNSPECIFIED = 0 + ENABLING = 1 + ENABLED = 2 + DISABLING = 3 + NOT_ENABLED = 4 + FAILED_ENABLING = 5 + FAILED_DISABLING = 6 + + +class DBWorkload(proto.Enum): + r"""The various states available for the Autonomous Database + workload type. + + Values: + DB_WORKLOAD_UNSPECIFIED (0): + Default unspecified value. + OLTP (1): + Autonomous Transaction Processing database. + DW (2): + Autonomous Data Warehouse database. + AJD (3): + Autonomous JSON Database. + APEX (4): + Autonomous Database with the Oracle APEX + Application Development workload type. + """ + DB_WORKLOAD_UNSPECIFIED = 0 + OLTP = 1 + DW = 2 + AJD = 3 + APEX = 4 + + +class AutonomousDatabase(proto.Message): + r"""Details of the Autonomous Database resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabase/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database resource in + the following format: + projects/{project}/locations/{region}/autonomousDatabases/{autonomous_database} + database (str): + Optional. The name of the Autonomous + Database. The database name must be unique in + the project. The name must begin with a letter + and can contain a maximum of 30 alphanumeric + characters. + display_name (str): + Optional. The display name for the Autonomous + Database. The name does not have to be unique + within your project. + entitlement_id (str): + Output only. The ID of the subscription + entitlement associated with the Autonomous + Database. 
+ admin_password (str): + Optional. The password for the default ADMIN + user. + properties (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties): + Optional. The properties of the Autonomous + Database. + labels (MutableMapping[str, str]): + Optional. The labels or tags associated with + the Autonomous Database. + network (str): + Required. The name of the VPC network used by + the Autonomous Database in the following format: + projects/{project}/global/networks/{network} + cidr (str): + Required. The subnet CIDR range for the + Autonmous Database. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time that the + Autonomous Database was created. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + entitlement_id: str = proto.Field( + proto.STRING, + number=5, + ) + admin_password: str = proto.Field( + proto.STRING, + number=6, + ) + properties: 'AutonomousDatabaseProperties' = proto.Field( + proto.MESSAGE, + number=7, + message='AutonomousDatabaseProperties', + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + network: str = proto.Field( + proto.STRING, + number=9, + ) + cidr: str = proto.Field( + proto.STRING, + number=10, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + + +class AutonomousDatabaseProperties(proto.Message): + r"""The properties of an Autonomous Database. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ocid (str): + Output only. OCID of the Autonomous Database. + https://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle + compute_count (float): + Optional. The number of compute servers for + the Autonomous Database. 
+ cpu_core_count (int): + Optional. The number of CPU cores to be made + available to the database. + data_storage_size_tb (int): + Optional. The size of the data stored in the + database, in terabytes. + data_storage_size_gb (int): + Optional. The size of the data stored in the + database, in gigabytes. + db_workload (google.cloud.oracledatabase_v1.types.DBWorkload): + Required. The workload type of the Autonomous + Database. + db_edition (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.DatabaseEdition): + Optional. The edition of the Autonomous + Databases. + character_set (str): + Optional. The character set for the + Autonomous Database. The default is AL32UTF8. + n_character_set (str): + Optional. The national character set for the + Autonomous Database. The default is AL16UTF16. + private_endpoint_ip (str): + Optional. The private endpoint IP address for + the Autonomous Database. + private_endpoint_label (str): + Optional. The private endpoint label for the + Autonomous Database. + db_version (str): + Optional. The Oracle Database version for the + Autonomous Database. + is_auto_scaling_enabled (bool): + Optional. This field indicates if auto + scaling is enabled for the Autonomous Database + CPU core count. + is_storage_auto_scaling_enabled (bool): + Optional. This field indicates if auto + scaling is enabled for the Autonomous Database + storage. + license_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.LicenseType): + Required. The license type used for the + Autonomous Database. + customer_contacts (MutableSequence[google.cloud.oracledatabase_v1.types.CustomerContact]): + Optional. The list of customer contacts. + secret_id (str): + Optional. The ID of the Oracle Cloud + Infrastructure vault secret. + vault_id (str): + Optional. The ID of the Oracle Cloud + Infrastructure vault. + maintenance_schedule_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.MaintenanceScheduleType): + Optional. 
The maintenance schedule of the + Autonomous Database. + mtls_connection_required (bool): + Optional. This field specifies if the + Autonomous Database requires mTLS connections. + backup_retention_period_days (int): + Optional. The retention period for the + Autonomous Database. This field is specified in + days, can range from 1 day to 60 days, and has a + default value of 60 days. + actual_used_data_storage_size_tb (float): + Output only. The amount of storage currently + being used for user and system data, in + terabytes. + allocated_storage_size_tb (float): + Output only. The amount of storage currently + allocated for the database tables and billed + for, rounded up in terabytes. + apex_details (google.cloud.oracledatabase_v1.types.AutonomousDatabaseApex): + Output only. The details for the Oracle APEX + Application Development. + are_primary_allowlisted_ips_used (bool): + Output only. This field indicates the status + of Data Guard and Access control for the + Autonomous Database. The field's value is null + if Data Guard is disabled or Access Control is + disabled. The field's value is TRUE if both Data + Guard and Access Control are enabled, and the + Autonomous Database is using primary IP access + control list (ACL) for standby. The field's + value is FALSE if both Data Guard and Access + Control are enabled, and the Autonomous Database + is using a different IP access control list + (ACL) for standby compared to primary. + + This field is a member of `oneof`_ ``_are_primary_allowlisted_ips_used``. + lifecycle_details (str): + Output only. The details of the current + lifestyle state of the Autonomous Database. + state (google.cloud.oracledatabase_v1.types.State): + Output only. The current lifecycle state of + the Autonomous Database. + autonomous_container_database_id (str): + Output only. The Autonomous Container + Database OCID. + available_upgrade_versions (MutableSequence[str]): + Output only. 
The list of available Oracle + Database upgrade versions for an Autonomous + Database. + connection_strings (google.cloud.oracledatabase_v1.types.AutonomousDatabaseConnectionStrings): + Output only. The connection strings used to + connect to an Autonomous Database. + connection_urls (google.cloud.oracledatabase_v1.types.AutonomousDatabaseConnectionUrls): + Output only. The Oracle Connection URLs for + an Autonomous Database. + failed_data_recovery_duration (google.protobuf.duration_pb2.Duration): + Output only. This field indicates the number + of seconds of data loss during a Data Guard + failover. + memory_table_gbs (int): + Output only. The memory assigned to in-memory + tables in an Autonomous Database. + is_local_data_guard_enabled (bool): + Output only. This field indicates whether the + Autonomous Database has local (in-region) Data + Guard enabled. + local_adg_auto_failover_max_data_loss_limit (int): + Output only. This field indicates the maximum + data loss limit for an Autonomous Database, in + seconds. + local_standby_db (google.cloud.oracledatabase_v1.types.AutonomousDatabaseStandbySummary): + Output only. The details of the Autonomous + Data Guard standby database. + memory_per_oracle_compute_unit_gbs (int): + Output only. The amount of memory enabled per + ECPU, in gigabytes. + local_disaster_recovery_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.LocalDisasterRecoveryType): + Output only. This field indicates the local + disaster recovery (DR) type of an Autonomous + Database. + data_safe_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.DataSafeState): + Output only. The current state of the Data + Safe registration for the Autonomous Database. + database_management_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.DatabaseManagementState): + Output only. The current state of database + management for the Autonomous Database. 
+ open_mode (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.OpenMode): + Output only. This field indicates the current + mode of the Autonomous Database. + operations_insights_state (google.cloud.oracledatabase_v1.types.OperationsInsightsState): + Output only. This field indicates the state + of Operations Insights for the Autonomous + Database. + peer_db_ids (MutableSequence[str]): + Output only. The list of OCIDs of standby + databases located in Autonomous Data Guard + remote regions that are associated with the + source database. + permission_level (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.PermissionLevel): + Output only. The permission level of the + Autonomous Database. + private_endpoint (str): + Output only. The private endpoint for the + Autonomous Database. + refreshable_mode (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.RefreshableMode): + Output only. The refresh mode of the cloned + Autonomous Database. + refreshable_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.RefreshableState): + Output only. The refresh State of the clone. + role (google.cloud.oracledatabase_v1.types.AutonomousDatabaseProperties.Role): + Output only. The Data Guard role of the + Autonomous Database. + scheduled_operation_details (MutableSequence[google.cloud.oracledatabase_v1.types.ScheduledOperationDetails]): + Output only. The list and details of the + scheduled operations of the Autonomous Database. + sql_web_developer_url (str): + Output only. The SQL Web Developer URL for + the Autonomous Database. + supported_clone_regions (MutableSequence[str]): + Output only. The list of available regions + that can be used to create a clone for the + Autonomous Database. + used_data_storage_size_tbs (int): + Output only. The storage space used by + Autonomous Database, in gigabytes. + oci_url (str): + Output only. The Oracle Cloud Infrastructure + link for the Autonomous Database. 
+ total_auto_backup_storage_size_gbs (float): + Output only. The storage space used by + automatic backups of Autonomous Database, in + gigabytes. + next_long_term_backup_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The long term backup schedule of + the Autonomous Database. + maintenance_begin_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time when + maintenance will begin. + maintenance_end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time when + maintenance will end. + """ + class DatabaseEdition(proto.Enum): + r"""The editions available for the Autonomous Database. + + Values: + DATABASE_EDITION_UNSPECIFIED (0): + Default unspecified value. + STANDARD_EDITION (1): + Standard Database Edition + ENTERPRISE_EDITION (2): + Enterprise Database Edition + """ + DATABASE_EDITION_UNSPECIFIED = 0 + STANDARD_EDITION = 1 + ENTERPRISE_EDITION = 2 + + class LicenseType(proto.Enum): + r"""The license types available for the Autonomous Database. + + Values: + LICENSE_TYPE_UNSPECIFIED (0): + Unspecified + LICENSE_INCLUDED (1): + License included part of offer + BRING_YOUR_OWN_LICENSE (2): + Bring your own license + """ + LICENSE_TYPE_UNSPECIFIED = 0 + LICENSE_INCLUDED = 1 + BRING_YOUR_OWN_LICENSE = 2 + + class MaintenanceScheduleType(proto.Enum): + r"""The available maintenance schedules for the Autonomous + Database. + + Values: + MAINTENANCE_SCHEDULE_TYPE_UNSPECIFIED (0): + Default unspecified value. + EARLY (1): + An EARLY maintenance schedule patches the + database before the regular scheduled + maintenance. + REGULAR (2): + A REGULAR maintenance schedule follows the + normal maintenance cycle. + """ + MAINTENANCE_SCHEDULE_TYPE_UNSPECIFIED = 0 + EARLY = 1 + REGULAR = 2 + + class LocalDisasterRecoveryType(proto.Enum): + r"""The types of local disaster recovery available for an + Autonomous Database. + + Values: + LOCAL_DISASTER_RECOVERY_TYPE_UNSPECIFIED (0): + Default unspecified value. 
+ ADG (1): + Autonomous Data Guard recovery. + BACKUP_BASED (2): + Backup based recovery. + """ + LOCAL_DISASTER_RECOVERY_TYPE_UNSPECIFIED = 0 + ADG = 1 + BACKUP_BASED = 2 + + class DataSafeState(proto.Enum): + r"""Varies states of the Data Safe registration for the + Autonomous Database. + + Values: + DATA_SAFE_STATE_UNSPECIFIED (0): + Default unspecified value. + REGISTERING (1): + Registering data safe state. + REGISTERED (2): + Registered data safe state. + DEREGISTERING (3): + Deregistering data safe state. + NOT_REGISTERED (4): + Not registered data safe state. + FAILED (5): + Failed data safe state. + """ + DATA_SAFE_STATE_UNSPECIFIED = 0 + REGISTERING = 1 + REGISTERED = 2 + DEREGISTERING = 3 + NOT_REGISTERED = 4 + FAILED = 5 + + class DatabaseManagementState(proto.Enum): + r"""The different states of database management for an Autonomous + Database. + + Values: + DATABASE_MANAGEMENT_STATE_UNSPECIFIED (0): + Default unspecified value. + ENABLING (1): + Enabling Database Management state + ENABLED (2): + Enabled Database Management state + DISABLING (3): + Disabling Database Management state + NOT_ENABLED (4): + Not Enabled Database Management state + FAILED_ENABLING (5): + Failed enabling Database Management state + FAILED_DISABLING (6): + Failed disabling Database Management state + """ + DATABASE_MANAGEMENT_STATE_UNSPECIFIED = 0 + ENABLING = 1 + ENABLED = 2 + DISABLING = 3 + NOT_ENABLED = 4 + FAILED_ENABLING = 5 + FAILED_DISABLING = 6 + + class OpenMode(proto.Enum): + r"""This field indicates the modes of an Autonomous Database. + + Values: + OPEN_MODE_UNSPECIFIED (0): + Default unspecified value. + READ_ONLY (1): + Read Only Mode + READ_WRITE (2): + Read Write Mode + """ + OPEN_MODE_UNSPECIFIED = 0 + READ_ONLY = 1 + READ_WRITE = 2 + + class PermissionLevel(proto.Enum): + r"""The types of permission levels for an Autonomous Database. + + Values: + PERMISSION_LEVEL_UNSPECIFIED (0): + Default unspecified value. 
+ RESTRICTED (1): + Restricted mode allows access only by admin + users. + UNRESTRICTED (2): + Normal access. + """ + PERMISSION_LEVEL_UNSPECIFIED = 0 + RESTRICTED = 1 + UNRESTRICTED = 2 + + class RefreshableMode(proto.Enum): + r"""The refresh mode of the cloned Autonomous Database. + + Values: + REFRESHABLE_MODE_UNSPECIFIED (0): + The default unspecified value. + AUTOMATIC (1): + AUTOMATIC indicates that the cloned database + is automatically refreshed with data from the + source Autonomous Database. + MANUAL (2): + MANUAL indicates that the cloned database is + manually refreshed with data from the source + Autonomous Database. + """ + REFRESHABLE_MODE_UNSPECIFIED = 0 + AUTOMATIC = 1 + MANUAL = 2 + + class RefreshableState(proto.Enum): + r"""The refresh state of the cloned Autonomous Database. + + Values: + REFRESHABLE_STATE_UNSPECIFIED (0): + Default unspecified value. + REFRESHING (1): + Refreshing + NOT_REFRESHING (2): + Not refreshed + """ + REFRESHABLE_STATE_UNSPECIFIED = 0 + REFRESHING = 1 + NOT_REFRESHING = 2 + + class Role(proto.Enum): + r"""The Data Guard role of the Autonomous Database. + + Values: + ROLE_UNSPECIFIED (0): + Default unspecified value. 
+ PRIMARY (1): + Primary role + STANDBY (2): + Standby role + DISABLED_STANDBY (3): + Disabled standby role + BACKUP_COPY (4): + Backup copy role + SNAPSHOT_STANDBY (5): + Snapshot standby role + """ + ROLE_UNSPECIFIED = 0 + PRIMARY = 1 + STANDBY = 2 + DISABLED_STANDBY = 3 + BACKUP_COPY = 4 + SNAPSHOT_STANDBY = 5 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + compute_count: float = proto.Field( + proto.FLOAT, + number=2, + ) + cpu_core_count: int = proto.Field( + proto.INT32, + number=3, + ) + data_storage_size_tb: int = proto.Field( + proto.INT32, + number=4, + ) + data_storage_size_gb: int = proto.Field( + proto.INT32, + number=63, + ) + db_workload: 'DBWorkload' = proto.Field( + proto.ENUM, + number=5, + enum='DBWorkload', + ) + db_edition: DatabaseEdition = proto.Field( + proto.ENUM, + number=6, + enum=DatabaseEdition, + ) + character_set: str = proto.Field( + proto.STRING, + number=8, + ) + n_character_set: str = proto.Field( + proto.STRING, + number=9, + ) + private_endpoint_ip: str = proto.Field( + proto.STRING, + number=10, + ) + private_endpoint_label: str = proto.Field( + proto.STRING, + number=11, + ) + db_version: str = proto.Field( + proto.STRING, + number=12, + ) + is_auto_scaling_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) + is_storage_auto_scaling_enabled: bool = proto.Field( + proto.BOOL, + number=15, + ) + license_type: LicenseType = proto.Field( + proto.ENUM, + number=16, + enum=LicenseType, + ) + customer_contacts: MutableSequence[common.CustomerContact] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message=common.CustomerContact, + ) + secret_id: str = proto.Field( + proto.STRING, + number=18, + ) + vault_id: str = proto.Field( + proto.STRING, + number=19, + ) + maintenance_schedule_type: MaintenanceScheduleType = proto.Field( + proto.ENUM, + number=20, + enum=MaintenanceScheduleType, + ) + mtls_connection_required: bool = proto.Field( + proto.BOOL, + number=34, + ) + backup_retention_period_days: int = 
proto.Field( + proto.INT32, + number=57, + ) + actual_used_data_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=21, + ) + allocated_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=22, + ) + apex_details: 'AutonomousDatabaseApex' = proto.Field( + proto.MESSAGE, + number=23, + message='AutonomousDatabaseApex', + ) + are_primary_allowlisted_ips_used: bool = proto.Field( + proto.BOOL, + number=24, + optional=True, + ) + lifecycle_details: str = proto.Field( + proto.STRING, + number=25, + ) + state: 'State' = proto.Field( + proto.ENUM, + number=26, + enum='State', + ) + autonomous_container_database_id: str = proto.Field( + proto.STRING, + number=27, + ) + available_upgrade_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) + connection_strings: 'AutonomousDatabaseConnectionStrings' = proto.Field( + proto.MESSAGE, + number=29, + message='AutonomousDatabaseConnectionStrings', + ) + connection_urls: 'AutonomousDatabaseConnectionUrls' = proto.Field( + proto.MESSAGE, + number=30, + message='AutonomousDatabaseConnectionUrls', + ) + failed_data_recovery_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=31, + message=duration_pb2.Duration, + ) + memory_table_gbs: int = proto.Field( + proto.INT32, + number=32, + ) + is_local_data_guard_enabled: bool = proto.Field( + proto.BOOL, + number=33, + ) + local_adg_auto_failover_max_data_loss_limit: int = proto.Field( + proto.INT32, + number=35, + ) + local_standby_db: 'AutonomousDatabaseStandbySummary' = proto.Field( + proto.MESSAGE, + number=36, + message='AutonomousDatabaseStandbySummary', + ) + memory_per_oracle_compute_unit_gbs: int = proto.Field( + proto.INT32, + number=37, + ) + local_disaster_recovery_type: LocalDisasterRecoveryType = proto.Field( + proto.ENUM, + number=38, + enum=LocalDisasterRecoveryType, + ) + data_safe_state: DataSafeState = proto.Field( + proto.ENUM, + number=39, + enum=DataSafeState, + ) + database_management_state: 
DatabaseManagementState = proto.Field( + proto.ENUM, + number=40, + enum=DatabaseManagementState, + ) + open_mode: OpenMode = proto.Field( + proto.ENUM, + number=41, + enum=OpenMode, + ) + operations_insights_state: 'OperationsInsightsState' = proto.Field( + proto.ENUM, + number=42, + enum='OperationsInsightsState', + ) + peer_db_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=43, + ) + permission_level: PermissionLevel = proto.Field( + proto.ENUM, + number=44, + enum=PermissionLevel, + ) + private_endpoint: str = proto.Field( + proto.STRING, + number=45, + ) + refreshable_mode: RefreshableMode = proto.Field( + proto.ENUM, + number=46, + enum=RefreshableMode, + ) + refreshable_state: RefreshableState = proto.Field( + proto.ENUM, + number=47, + enum=RefreshableState, + ) + role: Role = proto.Field( + proto.ENUM, + number=48, + enum=Role, + ) + scheduled_operation_details: MutableSequence['ScheduledOperationDetails'] = proto.RepeatedField( + proto.MESSAGE, + number=64, + message='ScheduledOperationDetails', + ) + sql_web_developer_url: str = proto.Field( + proto.STRING, + number=50, + ) + supported_clone_regions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=51, + ) + used_data_storage_size_tbs: int = proto.Field( + proto.INT32, + number=53, + ) + oci_url: str = proto.Field( + proto.STRING, + number=54, + ) + total_auto_backup_storage_size_gbs: float = proto.Field( + proto.FLOAT, + number=59, + ) + next_long_term_backup_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=60, + message=timestamp_pb2.Timestamp, + ) + maintenance_begin_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=65, + message=timestamp_pb2.Timestamp, + ) + maintenance_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=66, + message=timestamp_pb2.Timestamp, + ) + + +class AutonomousDatabaseApex(proto.Message): + r"""Oracle APEX Application Development. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseApex + + Attributes: + apex_version (str): + Output only. The Oracle APEX Application + Development version. + ords_version (str): + Output only. The Oracle REST Data Services + (ORDS) version. + """ + + apex_version: str = proto.Field( + proto.STRING, + number=1, + ) + ords_version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AutonomousDatabaseConnectionStrings(proto.Message): + r"""The connection string used to connect to the Autonomous + Database. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseConnectionStrings + + Attributes: + all_connection_strings (google.cloud.oracledatabase_v1.types.AllConnectionStrings): + Output only. Returns all connection strings + that can be used to connect to the Autonomous + Database. + dedicated (str): + Output only. The database service provides + the least level of resources to each SQL + statement, but supports the most number of + concurrent SQL statements. + high (str): + Output only. The database service provides + the highest level of resources to each SQL + statement. + low (str): + Output only. The database service provides + the least level of resources to each SQL + statement. + medium (str): + Output only. The database service provides a + lower level of resources to each SQL statement. + profiles (MutableSequence[google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile]): + Output only. A list of connection string + profiles to allow clients to group, filter, and + select values based on the structured metadata. 
+ """ + + all_connection_strings: 'AllConnectionStrings' = proto.Field( + proto.MESSAGE, + number=1, + message='AllConnectionStrings', + ) + dedicated: str = proto.Field( + proto.STRING, + number=2, + ) + high: str = proto.Field( + proto.STRING, + number=3, + ) + low: str = proto.Field( + proto.STRING, + number=4, + ) + medium: str = proto.Field( + proto.STRING, + number=5, + ) + profiles: MutableSequence['DatabaseConnectionStringProfile'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='DatabaseConnectionStringProfile', + ) + + +class DatabaseConnectionStringProfile(proto.Message): + r"""The connection string profile to allow clients to group. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/DatabaseConnectionStringProfile + + Attributes: + consumer_group (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.ConsumerGroup): + Output only. The current consumer group being + used by the connection. + display_name (str): + Output only. The display name for the + database connection. + host_format (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.HostFormat): + Output only. The host name format being + currently used in connection string. + is_regional (bool): + Output only. This field indicates if the + connection string is regional and is only + applicable for cross-region Data Guard. + protocol (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.Protocol): + Output only. The protocol being used by the + connection. + session_mode (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.SessionMode): + Output only. The current session mode of the + connection. + syntax_format (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.SyntaxFormat): + Output only. The syntax of the connection + string. + tls_authentication (google.cloud.oracledatabase_v1.types.DatabaseConnectionStringProfile.TLSAuthentication): + Output only. 
This field indicates the TLS + authentication type of the connection. + value (str): + Output only. The value of the connection + string. + """ + class ConsumerGroup(proto.Enum): + r"""The various consumer groups available in the connection + string profile. + + Values: + CONSUMER_GROUP_UNSPECIFIED (0): + Default unspecified value. + HIGH (1): + High consumer group. + MEDIUM (2): + Medium consumer group. + LOW (3): + Low consumer group. + TP (4): + TP consumer group. + TPURGENT (5): + TPURGENT consumer group. + """ + CONSUMER_GROUP_UNSPECIFIED = 0 + HIGH = 1 + MEDIUM = 2 + LOW = 3 + TP = 4 + TPURGENT = 5 + + class HostFormat(proto.Enum): + r"""The host name format being used in the connection string. + + Values: + HOST_FORMAT_UNSPECIFIED (0): + Default unspecified value. + FQDN (1): + FQDN + IP (2): + IP + """ + HOST_FORMAT_UNSPECIFIED = 0 + FQDN = 1 + IP = 2 + + class Protocol(proto.Enum): + r"""The protocol being used by the connection. + + Values: + PROTOCOL_UNSPECIFIED (0): + Default unspecified value. + TCP (1): + Tcp + TCPS (2): + Tcps + """ + PROTOCOL_UNSPECIFIED = 0 + TCP = 1 + TCPS = 2 + + class SessionMode(proto.Enum): + r"""The session mode of the connection. + + Values: + SESSION_MODE_UNSPECIFIED (0): + Default unspecified value. + DIRECT (1): + Direct + INDIRECT (2): + Indirect + """ + SESSION_MODE_UNSPECIFIED = 0 + DIRECT = 1 + INDIRECT = 2 + + class SyntaxFormat(proto.Enum): + r"""Specifies syntax of the connection string. + + Values: + SYNTAX_FORMAT_UNSPECIFIED (0): + Default unspecified value. + LONG (1): + Long + EZCONNECT (2): + Ezconnect + EZCONNECTPLUS (3): + Ezconnectplus + """ + SYNTAX_FORMAT_UNSPECIFIED = 0 + LONG = 1 + EZCONNECT = 2 + EZCONNECTPLUS = 3 + + class TLSAuthentication(proto.Enum): + r"""This field indicates the TLS authentication type of the + connection. + + Values: + TLS_AUTHENTICATION_UNSPECIFIED (0): + Default unspecified value. 
+ SERVER (1): + Server + MUTUAL (2): + Mutual + """ + TLS_AUTHENTICATION_UNSPECIFIED = 0 + SERVER = 1 + MUTUAL = 2 + + consumer_group: ConsumerGroup = proto.Field( + proto.ENUM, + number=1, + enum=ConsumerGroup, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + host_format: HostFormat = proto.Field( + proto.ENUM, + number=3, + enum=HostFormat, + ) + is_regional: bool = proto.Field( + proto.BOOL, + number=4, + ) + protocol: Protocol = proto.Field( + proto.ENUM, + number=5, + enum=Protocol, + ) + session_mode: SessionMode = proto.Field( + proto.ENUM, + number=6, + enum=SessionMode, + ) + syntax_format: SyntaxFormat = proto.Field( + proto.ENUM, + number=7, + enum=SyntaxFormat, + ) + tls_authentication: TLSAuthentication = proto.Field( + proto.ENUM, + number=8, + enum=TLSAuthentication, + ) + value: str = proto.Field( + proto.STRING, + number=9, + ) + + +class AllConnectionStrings(proto.Message): + r"""A list of all connection strings that can be used to connect + to the Autonomous Database. + + Attributes: + high (str): + Output only. The database service provides + the highest level of resources to each SQL + statement. + low (str): + Output only. The database service provides + the least level of resources to each SQL + statement. + medium (str): + Output only. The database service provides a + lower level of resources to each SQL statement. + """ + + high: str = proto.Field( + proto.STRING, + number=1, + ) + low: str = proto.Field( + proto.STRING, + number=2, + ) + medium: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AutonomousDatabaseConnectionUrls(proto.Message): + r"""The URLs for accessing Oracle Application Express (APEX) and + SQL Developer Web with a browser from a Compute instance. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseConnectionUrls + + Attributes: + apex_uri (str): + Output only. Oracle Application Express + (APEX) URL. 
+ database_transforms_uri (str): + Output only. The URL of the Database + Transforms for the Autonomous Database. + graph_studio_uri (str): + Output only. The URL of the Graph Studio for + the Autonomous Database. + machine_learning_notebook_uri (str): + Output only. The URL of the Oracle Machine + Learning (OML) Notebook for the Autonomous + Database. + machine_learning_user_management_uri (str): + Output only. The URL of Machine Learning user + management the Autonomous Database. + mongo_db_uri (str): + Output only. The URL of the MongoDB API for + the Autonomous Database. + ords_uri (str): + Output only. The Oracle REST Data Services + (ORDS) URL of the Web Access for the Autonomous + Database. + sql_dev_web_uri (str): + Output only. The URL of the Oracle SQL + Developer Web for the Autonomous Database. + """ + + apex_uri: str = proto.Field( + proto.STRING, + number=1, + ) + database_transforms_uri: str = proto.Field( + proto.STRING, + number=2, + ) + graph_studio_uri: str = proto.Field( + proto.STRING, + number=3, + ) + machine_learning_notebook_uri: str = proto.Field( + proto.STRING, + number=4, + ) + machine_learning_user_management_uri: str = proto.Field( + proto.STRING, + number=5, + ) + mongo_db_uri: str = proto.Field( + proto.STRING, + number=6, + ) + ords_uri: str = proto.Field( + proto.STRING, + number=7, + ) + sql_dev_web_uri: str = proto.Field( + proto.STRING, + number=8, + ) + + +class AutonomousDatabaseStandbySummary(proto.Message): + r"""Autonomous Data Guard standby database details. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/AutonomousDatabaseStandbySummary + + Attributes: + lag_time_duration (google.protobuf.duration_pb2.Duration): + Output only. The amount of time, in seconds, + that the data of the standby database lags in + comparison to the data of the primary database. + lifecycle_details (str): + Output only. The additional details about the + current lifecycle state of the Autonomous + Database. 
+ state (google.cloud.oracledatabase_v1.types.State): + Output only. The current lifecycle state of + the Autonomous Database. + data_guard_role_changed_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the Autonomous + Data Guard role was switched for the standby + Autonomous Database. + disaster_recovery_role_changed_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the Disaster + Recovery role was switched for the standby + Autonomous Database. + """ + + lag_time_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + lifecycle_details: str = proto.Field( + proto.STRING, + number=2, + ) + state: 'State' = proto.Field( + proto.ENUM, + number=3, + enum='State', + ) + data_guard_role_changed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + disaster_recovery_role_changed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class ScheduledOperationDetails(proto.Message): + r"""Details of scheduled operation. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/ScheduledOperationDetails + + Attributes: + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Output only. Day of week. + start_time (google.type.timeofday_pb2.TimeOfDay): + Output only. Auto start time. + stop_time (google.type.timeofday_pb2.TimeOfDay): + Output only. Auto stop time. 
+ """ + + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, + ) + stop_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=5, + message=timeofday_pb2.TimeOfDay, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py new file mode 100644 index 000000000000..dacd4b6dc95b --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'AutonomousDatabaseCharacterSet', + }, +) + + +class AutonomousDatabaseCharacterSet(proto.Message): + r"""Details of the Autonomous Database character set resource. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabaseCharacterSets/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database Character + Set resource in the following format: + projects/{project}/locations/{region}/autonomousDatabaseCharacterSets/{autonomous_database_character_set} + character_set_type (google.cloud.oracledatabase_v1.types.AutonomousDatabaseCharacterSet.CharacterSetType): + Output only. The character set type for the + Autonomous Database. + character_set (str): + Output only. The character set name for the + Autonomous Database which is the ID in the + resource name. + """ + class CharacterSetType(proto.Enum): + r"""The type of character set an Autonomous Database can have. + + Values: + CHARACTER_SET_TYPE_UNSPECIFIED (0): + Character set type is not specified. + DATABASE (1): + Character set type is set to database. + NATIONAL (2): + Character set type is set to national. + """ + CHARACTER_SET_TYPE_UNSPECIFIED = 0 + DATABASE = 1 + NATIONAL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + character_set_type: CharacterSetType = proto.Field( + proto.ENUM, + number=2, + enum=CharacterSetType, + ) + character_set: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py new file mode 100644 index 000000000000..c7f900cfb859 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'AutonomousDatabaseBackup', + 'AutonomousDatabaseBackupProperties', + }, +) + + +class AutonomousDatabaseBackup(proto.Message): + r"""Details of the Autonomous Database Backup resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDatabaseBackup/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database Backup + resource with the format: + projects/{project}/locations/{region}/autonomousDatabaseBackups/{autonomous_database_backup} + autonomous_database (str): + Required. The name of the Autonomous Database resource for + which the backup is being created. Format: + projects/{project}/locations/{region}/autonomousDatabases/{autonomous_database} + display_name (str): + Optional. User friendly name for the Backup. + The name does not have to be unique. + properties (google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackupProperties): + Optional. Various properties of the backup. + labels (MutableMapping[str, str]): + Optional. labels or tags associated with the + resource. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + autonomous_database: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + properties: 'AutonomousDatabaseBackupProperties' = proto.Field( + proto.MESSAGE, + number=4, + message='AutonomousDatabaseBackupProperties', + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class AutonomousDatabaseBackupProperties(proto.Message): + r"""Properties of the Autonomous Database Backup resource. + + Attributes: + ocid (str): + Output only. OCID of the Autonomous Database + backup. + https://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle + retention_period_days (int): + Optional. Retention period in days for the + backup. + compartment_id (str): + Output only. The OCID of the compartment. + database_size_tb (float): + Output only. The quantity of data in the + database, in terabytes. + db_version (str): + Output only. A valid Oracle Database version + for Autonomous Database. + is_long_term_backup (bool): + Output only. Indicates if the backup is long + term backup. + is_automatic_backup (bool): + Output only. Indicates if the backup is + automatic or user initiated. + is_restorable (bool): + Output only. Indicates if the backup can be + used to restore the Autonomous Database. + key_store_id (str): + Optional. The OCID of the key store of Oracle + Vault. + key_store_wallet (str): + Optional. The wallet name for Oracle Key + Vault. + kms_key_id (str): + Optional. The OCID of the key container that + is used as the master encryption key in database + transparent data encryption (TDE) operations. + kms_key_version_id (str): + Optional. The OCID of the key container + version that is used in database transparent + data encryption (TDE) operations KMS Key can + have multiple key versions. 
If none is + specified, the current key version (latest) of + the Key Id is used for the operation. Autonomous + Database Serverless does not use key versions, + hence is not applicable for Autonomous Database + Serverless instances. + lifecycle_details (str): + Output only. Additional information about the + current lifecycle state. + lifecycle_state (google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackupProperties.State): + Output only. The lifecycle state of the + backup. + size_tb (float): + Output only. The backup size in terabytes. + available_till_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp until when the backup + will be available. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the backup + completed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time the backup + started. + type_ (google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackupProperties.Type): + Output only. The type of the backup. + vault_id (str): + Optional. The OCID of the vault. + """ + class State(proto.Enum): + r"""// The various lifecycle states of the Autonomous Database + Backup. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + CREATING (1): + Indicates that the resource is in creating + state. + ACTIVE (2): + Indicates that the resource is in active + state. + DELETING (3): + Indicates that the resource is in deleting + state. + DELETED (4): + Indicates that the resource is in deleted + state. + FAILED (6): + Indicates that the resource is in failed + state. + UPDATING (7): + Indicates that the resource is in updating + state. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + DELETED = 4 + FAILED = 6 + UPDATING = 7 + + class Type(proto.Enum): + r"""The type of the backup. + + Values: + TYPE_UNSPECIFIED (0): + Default unspecified value. + INCREMENTAL (1): + Incremental backups. + FULL (2): + Full backups. 
+ LONG_TERM (3): + Long term backups. + """ + TYPE_UNSPECIFIED = 0 + INCREMENTAL = 1 + FULL = 2 + LONG_TERM = 3 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + retention_period_days: int = proto.Field( + proto.INT32, + number=2, + ) + compartment_id: str = proto.Field( + proto.STRING, + number=3, + ) + database_size_tb: float = proto.Field( + proto.FLOAT, + number=4, + ) + db_version: str = proto.Field( + proto.STRING, + number=5, + ) + is_long_term_backup: bool = proto.Field( + proto.BOOL, + number=6, + ) + is_automatic_backup: bool = proto.Field( + proto.BOOL, + number=7, + ) + is_restorable: bool = proto.Field( + proto.BOOL, + number=8, + ) + key_store_id: str = proto.Field( + proto.STRING, + number=9, + ) + key_store_wallet: str = proto.Field( + proto.STRING, + number=10, + ) + kms_key_id: str = proto.Field( + proto.STRING, + number=11, + ) + kms_key_version_id: str = proto.Field( + proto.STRING, + number=12, + ) + lifecycle_details: str = proto.Field( + proto.STRING, + number=13, + ) + lifecycle_state: State = proto.Field( + proto.ENUM, + number=14, + enum=State, + ) + size_tb: float = proto.Field( + proto.FLOAT, + number=15, + ) + available_till_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=17, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) + type_: Type = proto.Field( + proto.ENUM, + number=19, + enum=Type, + ) + vault_id: str = proto.Field( + proto.STRING, + number=20, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py new file mode 100644 index 
000000000000..f6773072c559 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import autonomous_database + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'AutonomousDbVersion', + }, +) + + +class AutonomousDbVersion(proto.Message): + r"""Details of the Autonomous Database version. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/AutonomousDbVersionSummary/ + + Attributes: + name (str): + Identifier. The name of the Autonomous Database Version + resource with the format: + projects/{project}/locations/{region}/autonomousDbVersions/{autonomous_db_version} + version (str): + Output only. An Oracle Database version for + Autonomous Database. + db_workload (google.cloud.oracledatabase_v1.types.DBWorkload): + Output only. The Autonomous Database workload + type. + workload_uri (str): + Output only. A URL that points to a detailed + description of the Autonomous Database version. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + db_workload: autonomous_database.DBWorkload = proto.Field( + proto.ENUM, + number=4, + enum=autonomous_database.DBWorkload, + ) + workload_uri: str = proto.Field( + proto.STRING, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py new file mode 100644 index 000000000000..54ef447a9f93 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'CustomerContact', + }, +) + + +class CustomerContact(proto.Message): + r"""The CustomerContact reference as defined by Oracle. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/CustomerContact + + Attributes: + email (str): + Required. The email address used by Oracle to + send notifications regarding databases and + infrastructure. 
+ """ + + email: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py new file mode 100644 index 000000000000..499c37d17305 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'DbNode', + 'DbNodeProperties', + }, +) + + +class DbNode(proto.Message): + r"""Details of the database node resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/DbNode/ + + Attributes: + name (str): + Identifier. The name of the database node resource in the + following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node} + properties (google.cloud.oracledatabase_v1.types.DbNodeProperties): + Optional. Various properties of the database + node. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + properties: 'DbNodeProperties' = proto.Field( + proto.MESSAGE, + number=3, + message='DbNodeProperties', + ) + + +class DbNodeProperties(proto.Message): + r"""Various properties and settings associated with Db node. + + Attributes: + ocid (str): + Output only. OCID of database node. + ocpu_count (int): + Optional. OCPU count per database node. + memory_size_gb (int): + Memory allocated in GBs. + db_node_storage_size_gb (int): + Optional. Local storage per database node. + db_server_ocid (str): + Optional. Database server OCID. + hostname (str): + Optional. DNS + state (google.cloud.oracledatabase_v1.types.DbNodeProperties.State): + Output only. State of the database node. + total_cpu_core_count (int): + Total CPU core count of the database node. + """ + class State(proto.Enum): + r"""The various lifecycle states of the database node. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + Indicates that the resource is in + provisioning state. + AVAILABLE (2): + Indicates that the resource is in available + state. + UPDATING (3): + Indicates that the resource is in updating + state. + STOPPING (4): + Indicates that the resource is in stopping + state. + STOPPED (5): + Indicates that the resource is in stopped + state. + STARTING (6): + Indicates that the resource is in starting + state. + TERMINATING (7): + Indicates that the resource is in terminating + state. + TERMINATED (8): + Indicates that the resource is in terminated + state. + FAILED (9): + Indicates that the resource is in failed + state. 
+ """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + UPDATING = 3 + STOPPING = 4 + STOPPED = 5 + STARTING = 6 + TERMINATING = 7 + TERMINATED = 8 + FAILED = 9 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + ocpu_count: int = proto.Field( + proto.INT32, + number=2, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=3, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=4, + ) + db_server_ocid: str = proto.Field( + proto.STRING, + number=5, + ) + hostname: str = proto.Field( + proto.STRING, + number=8, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + total_cpu_core_count: int = proto.Field( + proto.INT32, + number=10, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py new file mode 100644 index 000000000000..f7b0a1cfc988 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'DbServer', + 'DbServerProperties', + }, +) + + +class DbServer(proto.Message): + r"""Details of the database server resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/DbServer/ + + Attributes: + name (str): + Identifier. The name of the database server resource with + the format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server} + display_name (str): + Optional. User friendly name for this + resource. + properties (google.cloud.oracledatabase_v1.types.DbServerProperties): + Optional. Various properties of the database + server. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + properties: 'DbServerProperties' = proto.Field( + proto.MESSAGE, + number=3, + message='DbServerProperties', + ) + + +class DbServerProperties(proto.Message): + r"""Various properties and settings associated with Exadata + database server. + + Attributes: + ocid (str): + Output only. OCID of database server. + ocpu_count (int): + Optional. OCPU count per database. + max_ocpu_count (int): + Optional. Maximum OCPU count per database. + memory_size_gb (int): + Optional. Memory allocated in GBs. + max_memory_size_gb (int): + Optional. Maximum memory allocated in GBs. + db_node_storage_size_gb (int): + Optional. Local storage per VM. + max_db_node_storage_size_gb (int): + Optional. Maximum local storage per VM. + vm_count (int): + Optional. Vm count per database. + state (google.cloud.oracledatabase_v1.types.DbServerProperties.State): + Output only. State of the database server. + db_node_ids (MutableSequence[str]): + Output only. OCID of database nodes + associated with the database server. 
+ """ + class State(proto.Enum): + r"""The various lifecycle states of the database server. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + CREATING (1): + Indicates that the resource is in creating + state. + AVAILABLE (2): + Indicates that the resource is in available + state. + UNAVAILABLE (3): + Indicates that the resource is in unavailable + state. + DELETING (4): + Indicates that the resource is in deleting + state. + DELETED (5): + Indicates that the resource is in deleted + state. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + AVAILABLE = 2 + UNAVAILABLE = 3 + DELETING = 4 + DELETED = 5 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + ocpu_count: int = proto.Field( + proto.INT32, + number=2, + ) + max_ocpu_count: int = proto.Field( + proto.INT32, + number=3, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=4, + ) + max_memory_size_gb: int = proto.Field( + proto.INT32, + number=5, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=6, + ) + max_db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=7, + ) + vm_count: int = proto.Field( + proto.INT32, + number=8, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + db_node_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py new file mode 100644 index 000000000000..98637445e52b --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'DbSystemShape', + }, +) + + +class DbSystemShape(proto.Message): + r"""Details of the Database System Shapes resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/DbSystemShapeSummary/ + + Attributes: + name (str): + Identifier. The name of the Database System Shape resource + with the format: + projects/{project}/locations/{region}/dbSystemShapes/{db_system_shape} + shape (str): + Optional. shape + min_node_count (int): + Optional. Minimum number of database servers. + max_node_count (int): + Optional. Maximum number of database servers. + min_storage_count (int): + Optional. Minimum number of storage servers. + max_storage_count (int): + Optional. Maximum number of storage servers. + available_core_count_per_node (int): + Optional. Number of cores per node. + available_memory_per_node_gb (int): + Optional. Memory per database server node in + gigabytes. + available_data_storage_tb (int): + Optional. Storage per storage server in + terabytes. + min_core_count_per_node (int): + Optional. Minimum core count per node. + min_memory_per_node_gb (int): + Optional. Minimum memory per node in + gigabytes. + min_db_node_storage_per_node_gb (int): + Optional. Minimum node storage per database + server in gigabytes. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + shape: str = proto.Field( + proto.STRING, + number=2, + ) + min_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + max_node_count: int = proto.Field( + proto.INT32, + number=4, + ) + min_storage_count: int = proto.Field( + proto.INT32, + number=5, + ) + max_storage_count: int = proto.Field( + proto.INT32, + number=6, + ) + available_core_count_per_node: int = proto.Field( + proto.INT32, + number=7, + ) + available_memory_per_node_gb: int = proto.Field( + proto.INT32, + number=8, + ) + available_data_storage_tb: int = proto.Field( + proto.INT32, + number=9, + ) + min_core_count_per_node: int = proto.Field( + proto.INT32, + number=10, + ) + min_memory_per_node_gb: int = proto.Field( + proto.INT32, + number=11, + ) + min_db_node_storage_per_node_gb: int = proto.Field( + proto.INT32, + number=12, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py new file mode 100644 index 000000000000..6314fc926c5e --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'Entitlement', + 'CloudAccountDetails', + }, +) + + +class Entitlement(proto.Message): + r"""Details of the Entitlement resource. + + Attributes: + name (str): + Identifier. The name of the Entitlement + resource with the format: + projects/{project}/locations/{region}/entitlements/{entitlement} + cloud_account_details (google.cloud.oracledatabase_v1.types.CloudAccountDetails): + Details of the OCI Cloud Account. + entitlement_id (str): + Output only. Google Cloud Marketplace order + ID (aka entitlement ID) + state (google.cloud.oracledatabase_v1.types.Entitlement.State): + Output only. Entitlement State. + """ + class State(proto.Enum): + r"""The various lifecycle states of the subscription. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + ACCOUNT_NOT_LINKED (1): + Account not linked. + ACCOUNT_NOT_ACTIVE (2): + Account is linked but not active. + ACTIVE (3): + Entitlement and Account are active. + """ + STATE_UNSPECIFIED = 0 + ACCOUNT_NOT_LINKED = 1 + ACCOUNT_NOT_ACTIVE = 2 + ACTIVE = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_account_details: 'CloudAccountDetails' = proto.Field( + proto.MESSAGE, + number=2, + message='CloudAccountDetails', + ) + entitlement_id: str = proto.Field( + proto.STRING, + number=3, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + + +class CloudAccountDetails(proto.Message): + r"""Details of the OCI Cloud Account. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cloud_account (str): + Output only. OCI account name. + cloud_account_home_region (str): + Output only. OCI account home region. + link_existing_account_uri (str): + Output only. URL to link an existing account. 
+ + This field is a member of `oneof`_ ``_link_existing_account_uri``. + account_creation_uri (str): + Output only. URL to create a new account and + link. + + This field is a member of `oneof`_ ``_account_creation_uri``. + """ + + cloud_account: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_account_home_region: str = proto.Field( + proto.STRING, + number=2, + ) + link_existing_account_uri: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + account_creation_uri: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py new file mode 100644 index 000000000000..7e4669189feb --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py @@ -0,0 +1,468 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import common +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'CloudExadataInfrastructure', + 'CloudExadataInfrastructureProperties', + 'MaintenanceWindow', + }, +) + + +class CloudExadataInfrastructure(proto.Message): + r"""Represents CloudExadataInfrastructure resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + + Attributes: + name (str): + Identifier. The name of the Exadata Infrastructure resource + with the format: + projects/{project}/locations/{region}/cloudExadataInfrastructures/{cloud_exadata_infrastructure} + display_name (str): + Optional. User friendly name for this + resource. + gcp_oracle_zone (str): + Optional. Google Cloud Platform location + where Oracle Exadata is hosted. + entitlement_id (str): + Output only. Entitlement ID of the private + offer against which this infrastructure resource + is provisioned. + properties (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructureProperties): + Optional. Various properties of the infra. + labels (MutableMapping[str, str]): + Optional. Labels or tags associated with the + resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time that the + Exadata Infrastructure was created. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + gcp_oracle_zone: str = proto.Field( + proto.STRING, + number=8, + ) + entitlement_id: str = proto.Field( + proto.STRING, + number=4, + ) + properties: 'CloudExadataInfrastructureProperties' = proto.Field( + proto.MESSAGE, + number=5, + message='CloudExadataInfrastructureProperties', + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + + +class CloudExadataInfrastructureProperties(proto.Message): + r"""Various properties of Exadata Infrastructure. + + Attributes: + ocid (str): + Output only. OCID of created infra. + https://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle + compute_count (int): + Optional. The number of compute servers for + the Exadata Infrastructure. + storage_count (int): + Optional. The number of Cloud Exadata storage + servers for the Exadata Infrastructure. + total_storage_size_gb (int): + Optional. The total storage allocated to the + Exadata Infrastructure resource, in gigabytes + (GB). + available_storage_size_gb (int): + Output only. The available storage can be + allocated to the Exadata Infrastructure + resource, in gigabytes (GB). + maintenance_window (google.cloud.oracledatabase_v1.types.MaintenanceWindow): + Optional. Maintenance window for repair. + state (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructureProperties.State): + Output only. The current lifecycle state of + the Exadata Infrastructure. + shape (str): + Required. The shape of the Exadata + Infrastructure. The shape determines the amount + of CPU, storage, and memory resources allocated + to the instance. + oci_url (str): + Output only. Deep link to the OCI console to + view this resource. + cpu_count (int): + Optional. 
The number of enabled CPU cores. + max_cpu_count (int): + Output only. The total number of CPU cores + available. + memory_size_gb (int): + Optional. The memory allocated in GBs. + max_memory_gb (int): + Output only. The total memory available in + GBs. + db_node_storage_size_gb (int): + Optional. The local node storage allocated in + GBs. + max_db_node_storage_size_gb (int): + Output only. The total local node storage + available in GBs. + data_storage_size_tb (float): + Output only. Size, in terabytes, of the DATA + disk group. + max_data_storage_tb (float): + Output only. The total available DATA disk + group size. + activated_storage_count (int): + Output only. The requested number of + additional storage servers activated for the + Exadata Infrastructure. + additional_storage_count (int): + Output only. The requested number of + additional storage servers for the Exadata + Infrastructure. + db_server_version (str): + Output only. The software version of the + database servers (dom0) in the Exadata + Infrastructure. + storage_server_version (str): + Output only. The software version of the + storage servers (cells) in the Exadata + Infrastructure. + next_maintenance_run_id (str): + Output only. The OCID of the next maintenance + run. + next_maintenance_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the next + maintenance run will occur. + next_security_maintenance_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the next security + maintenance run will occur. + customer_contacts (MutableSequence[google.cloud.oracledatabase_v1.types.CustomerContact]): + Optional. The list of customer contacts. + monthly_storage_server_version (str): + Output only. The monthly software version of + the storage servers (cells) in the Exadata + Infrastructure. Example: 20.1.15 + monthly_db_server_version (str): + Output only. 
The monthly software version of + the database servers (dom0) in the Exadata + Infrastructure. Example: 20.1.15 + """ + class State(proto.Enum): + r"""The various lifecycle states of the Exadata Infrastructure. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + The Exadata Infrastructure is being + provisioned. + AVAILABLE (2): + The Exadata Infrastructure is available for + use. + UPDATING (3): + The Exadata Infrastructure is being updated. + TERMINATING (4): + The Exadata Infrastructure is being + terminated. + TERMINATED (5): + The Exadata Infrastructure is terminated. + FAILED (6): + The Exadata Infrastructure is in failed + state. + MAINTENANCE_IN_PROGRESS (7): + The Exadata Infrastructure is in maintenance. + """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + UPDATING = 3 + TERMINATING = 4 + TERMINATED = 5 + FAILED = 6 + MAINTENANCE_IN_PROGRESS = 7 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + compute_count: int = proto.Field( + proto.INT32, + number=2, + ) + storage_count: int = proto.Field( + proto.INT32, + number=3, + ) + total_storage_size_gb: int = proto.Field( + proto.INT32, + number=4, + ) + available_storage_size_gb: int = proto.Field( + proto.INT32, + number=5, + ) + maintenance_window: 'MaintenanceWindow' = proto.Field( + proto.MESSAGE, + number=6, + message='MaintenanceWindow', + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + shape: str = proto.Field( + proto.STRING, + number=8, + ) + oci_url: str = proto.Field( + proto.STRING, + number=9, + ) + cpu_count: int = proto.Field( + proto.INT32, + number=10, + ) + max_cpu_count: int = proto.Field( + proto.INT32, + number=11, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=12, + ) + max_memory_gb: int = proto.Field( + proto.INT32, + number=13, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=14, + ) + max_db_node_storage_size_gb: int = proto.Field( + proto.INT32, + 
number=15, + ) + data_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=16, + ) + max_data_storage_tb: float = proto.Field( + proto.DOUBLE, + number=17, + ) + activated_storage_count: int = proto.Field( + proto.INT32, + number=18, + ) + additional_storage_count: int = proto.Field( + proto.INT32, + number=19, + ) + db_server_version: str = proto.Field( + proto.STRING, + number=20, + ) + storage_server_version: str = proto.Field( + proto.STRING, + number=21, + ) + next_maintenance_run_id: str = proto.Field( + proto.STRING, + number=22, + ) + next_maintenance_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=23, + message=timestamp_pb2.Timestamp, + ) + next_security_maintenance_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=24, + message=timestamp_pb2.Timestamp, + ) + customer_contacts: MutableSequence[common.CustomerContact] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message=common.CustomerContact, + ) + monthly_storage_server_version: str = proto.Field( + proto.STRING, + number=26, + ) + monthly_db_server_version: str = proto.Field( + proto.STRING, + number=27, + ) + + +class MaintenanceWindow(proto.Message): + r"""Maintenance window as defined by Oracle. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/MaintenanceWindow + + Attributes: + preference (google.cloud.oracledatabase_v1.types.MaintenanceWindow.MaintenanceWindowPreference): + Optional. The maintenance window scheduling + preference. + months (MutableSequence[google.type.month_pb2.Month]): + Optional. Months during the year when + maintenance should be performed. + weeks_of_month (MutableSequence[int]): + Optional. Weeks during the month when + maintenance should be performed. Weeks start on + the 1st, 8th, 15th, and 22nd days of the month, + and have a duration of 7 days. Weeks start and + end based on calendar dates, not days of the + week. 
+ days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Optional. Days during the week when + maintenance should be performed. + hours_of_day (MutableSequence[int]): + Optional. The window of hours during the day + when maintenance should be performed. The window + is a 4 hour slot. Valid values are: + + 0 - represents time slot 0:00 - 3:59 UTC + 4 - represents time slot 4:00 - 7:59 UTC + 8 - represents time slot 8:00 - 11:59 UTC + 12 - represents time slot 12:00 - 15:59 UTC + 16 - represents time slot 16:00 - 19:59 UTC 20 + - represents time slot 20:00 - 23:59 UTC + lead_time_week (int): + Optional. Lead time window allows user to set + a lead time to prepare for a down time. The lead + time is in weeks and valid value is between 1 to + 4. + patching_mode (google.cloud.oracledatabase_v1.types.MaintenanceWindow.PatchingMode): + Optional. Cloud CloudExadataInfrastructure + node patching method, either "ROLLING" + or "NONROLLING". Default value is ROLLING. + custom_action_timeout_mins (int): + Optional. Determines the amount of time the + system will wait before the start of each + database server patching operation. Custom + action timeout is in minutes and valid value is + between 15 to 120 (inclusive). + is_custom_action_timeout_enabled (bool): + Optional. If true, enables the configuration + of a custom action timeout (waiting period) + between database server patching operations. + """ + class MaintenanceWindowPreference(proto.Enum): + r"""Maintenance window preference. + + Values: + MAINTENANCE_WINDOW_PREFERENCE_UNSPECIFIED (0): + Default unspecified value. + CUSTOM_PREFERENCE (1): + Custom preference. + NO_PREFERENCE (2): + No preference. + """ + MAINTENANCE_WINDOW_PREFERENCE_UNSPECIFIED = 0 + CUSTOM_PREFERENCE = 1 + NO_PREFERENCE = 2 + + class PatchingMode(proto.Enum): + r"""Patching mode. + + Values: + PATCHING_MODE_UNSPECIFIED (0): + Default unspecified value. 
+ ROLLING (1): + Updates the Cloud Exadata database server + hosts in a rolling fashion. + NON_ROLLING (2): + The non-rolling maintenance method first + updates your storage servers at the same time, + then your database servers at the same time. + """ + PATCHING_MODE_UNSPECIFIED = 0 + ROLLING = 1 + NON_ROLLING = 2 + + preference: MaintenanceWindowPreference = proto.Field( + proto.ENUM, + number=1, + enum=MaintenanceWindowPreference, + ) + months: MutableSequence[month_pb2.Month] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=month_pb2.Month, + ) + weeks_of_month: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=3, + ) + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=4, + enum=dayofweek_pb2.DayOfWeek, + ) + hours_of_day: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=5, + ) + lead_time_week: int = proto.Field( + proto.INT32, + number=6, + ) + patching_mode: PatchingMode = proto.Field( + proto.ENUM, + number=7, + enum=PatchingMode, + ) + custom_action_timeout_mins: int = proto.Field( + proto.INT32, + number=8, + ) + is_custom_action_timeout_enabled: bool = proto.Field( + proto.BOOL, + number=9, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py new file mode 100644 index 000000000000..bc14de4f4a9c --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'GiVersion', + }, +) + + +class GiVersion(proto.Message): + r"""Details of the Oracle Grid Infrastructure (GI) version + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/GiVersionSummary/ + + Attributes: + name (str): + Identifier. The name of the Oracle Grid Infrastructure (GI) + version resource with the format: + projects/{project}/locations/{region}/giVersions/{gi_versions} + version (str): + Optional. version + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py new file mode 100644 index 000000000000..f6d3d2d71fc0 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'LocationMetadata', + }, +) + + +class LocationMetadata(proto.Message): + r"""Metadata for a given [Location][google.cloud.location.Location]. + + Attributes: + gcp_oracle_zones (MutableSequence[str]): + Output only. Google Cloud Platform Oracle + zones in a location. + """ + + gcp_oracle_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py new file mode 100644 index 000000000000..038dbf0e4c44 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py @@ -0,0 +1,1227 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.oracledatabase_v1.types import autonomous_database as gco_autonomous_database +from google.cloud.oracledatabase_v1.types import autonomous_database_character_set +from google.cloud.oracledatabase_v1.types import autonomous_db_backup +from google.cloud.oracledatabase_v1.types import autonomous_db_version +from google.cloud.oracledatabase_v1.types import db_node +from google.cloud.oracledatabase_v1.types import db_server +from google.cloud.oracledatabase_v1.types import db_system_shape +from google.cloud.oracledatabase_v1.types import entitlement +from google.cloud.oracledatabase_v1.types import exadata_infra +from google.cloud.oracledatabase_v1.types import gi_version +from google.cloud.oracledatabase_v1.types import vm_cluster +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'ListCloudExadataInfrastructuresRequest', + 'ListCloudExadataInfrastructuresResponse', + 'GetCloudExadataInfrastructureRequest', + 'CreateCloudExadataInfrastructureRequest', + 'DeleteCloudExadataInfrastructureRequest', + 'ListCloudVmClustersRequest', + 'ListCloudVmClustersResponse', + 'GetCloudVmClusterRequest', + 'CreateCloudVmClusterRequest', + 'DeleteCloudVmClusterRequest', + 'ListEntitlementsRequest', + 'ListEntitlementsResponse', + 'ListDbServersRequest', + 'ListDbServersResponse', + 'ListDbNodesRequest', + 'ListDbNodesResponse', + 'ListGiVersionsRequest', + 'ListGiVersionsResponse', + 'ListDbSystemShapesRequest', + 'ListDbSystemShapesResponse', + 'OperationMetadata', + 'ListAutonomousDatabasesRequest', + 'ListAutonomousDatabasesResponse', + 'GetAutonomousDatabaseRequest', + 'CreateAutonomousDatabaseRequest', + 
'DeleteAutonomousDatabaseRequest', + 'RestoreAutonomousDatabaseRequest', + 'GenerateAutonomousDatabaseWalletRequest', + 'GenerateAutonomousDatabaseWalletResponse', + 'ListAutonomousDbVersionsRequest', + 'ListAutonomousDbVersionsResponse', + 'ListAutonomousDatabaseCharacterSetsRequest', + 'ListAutonomousDatabaseCharacterSetsResponse', + 'ListAutonomousDatabaseBackupsRequest', + 'ListAutonomousDatabaseBackupsResponse', + }, +) + + +class ListCloudExadataInfrastructuresRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructures.List``. + + Attributes: + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the following + format: projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Exadata + infrastructures will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListCloudExadataInfrastructuresResponse(proto.Message): + r"""The response for ``CloudExadataInfrastructures.list``. + + Attributes: + cloud_exadata_infrastructures (MutableSequence[google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure]): + The list of Exadata Infrastructures. + next_page_token (str): + A token for fetching next page of response. 
+ """ + + @property + def raw_page(self): + return self + + cloud_exadata_infrastructures: MutableSequence[exadata_infra.CloudExadataInfrastructure] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=exadata_infra.CloudExadataInfrastructure, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetCloudExadataInfrastructureRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructure.Get``. + + Attributes: + name (str): + Required. The name of the Cloud Exadata Infrastructure in + the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateCloudExadataInfrastructureRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructure.Create``. + + Attributes: + parent (str): + Required. The parent value for + CloudExadataInfrastructure in the following + format: projects/{project}/locations/{location}. + cloud_exadata_infrastructure_id (str): + Required. The ID of the Exadata Infrastructure to create. + This value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a maximum + of 63 characters in length. The value must start with a + letter and end with a letter or a number. + cloud_exadata_infrastructure (google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure): + Required. Details of the Exadata + Infrastructure instance to create. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_exadata_infrastructure_id: str = proto.Field( + proto.STRING, + number=2, + ) + cloud_exadata_infrastructure: exadata_infra.CloudExadataInfrastructure = proto.Field( + proto.MESSAGE, + number=3, + message=exadata_infra.CloudExadataInfrastructure, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteCloudExadataInfrastructureRequest(proto.Message): + r"""The request for ``CloudExadataInfrastructure.Delete``. + + Attributes: + name (str): + Required. The name of the Cloud Exadata Infrastructure in + the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, all VM clusters for + this Exadata Infrastructure will be deleted. An + Exadata Infrastructure can only be deleted once + all its VM clusters have been deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListCloudVmClustersRequest(proto.Message): + r"""The request for ``CloudVmCluster.List``. + + Attributes: + parent (str): + Required. The name of the parent in the + following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The number of VM clusters to + return. If unspecified, at most 50 VM clusters + will be returned. 
The maximum value is 1,000. + page_token (str): + Optional. A token identifying the page of + results the server returns. + filter (str): + Optional. An expression for filtering the + results of the request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListCloudVmClustersResponse(proto.Message): + r"""The response for ``CloudVmCluster.List``. + + Attributes: + cloud_vm_clusters (MutableSequence[google.cloud.oracledatabase_v1.types.CloudVmCluster]): + The list of VM Clusters. + next_page_token (str): + A token to fetch the next page of results. + """ + + @property + def raw_page(self): + return self + + cloud_vm_clusters: MutableSequence[vm_cluster.CloudVmCluster] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=vm_cluster.CloudVmCluster, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetCloudVmClusterRequest(proto.Message): + r"""The request for ``CloudVmCluster.Get``. + + Attributes: + name (str): + Required. The name of the Cloud VM Cluster in the following + format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateCloudVmClusterRequest(proto.Message): + r"""The request for ``CloudVmCluster.Create``. + + Attributes: + parent (str): + Required. The name of the parent in the + following format: + projects/{project}/locations/{location}. + cloud_vm_cluster_id (str): + Required. The ID of the VM Cluster to create. This value is + restricted to (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and + must be a maximum of 63 characters in length. The value must + start with a letter and end with a letter or a number. 
+ cloud_vm_cluster (google.cloud.oracledatabase_v1.types.CloudVmCluster): + Required. The resource being created + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + cloud_vm_cluster_id: str = proto.Field( + proto.STRING, + number=2, + ) + cloud_vm_cluster: vm_cluster.CloudVmCluster = proto.Field( + proto.MESSAGE, + number=3, + message=vm_cluster.CloudVmCluster, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteCloudVmClusterRequest(proto.Message): + r"""The request for ``CloudVmCluster.Delete``. + + Attributes: + name (str): + Required. The name of the Cloud VM Cluster in the following + format: + projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, all child resources + for the VM Cluster will be deleted. A VM Cluster + can only be deleted once all its child resources + have been deleted. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListEntitlementsRequest(proto.Message): + r"""The request for ``Entitlement.List``. + + Attributes: + parent (str): + Required. The parent value for the + entitlement in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, a maximum of 50 + entitlements will be returned. The maximum value + is 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListEntitlementsResponse(proto.Message): + r"""The response for ``Entitlement.List``. + + Attributes: + entitlements (MutableSequence[google.cloud.oracledatabase_v1.types.Entitlement]): + The list of Entitlements + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + entitlements: MutableSequence[entitlement.Entitlement] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entitlement.Entitlement, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDbServersRequest(proto.Message): + r"""The request for ``DbServer.List``. + + Attributes: + parent (str): + Required. The parent value for database + server in the following format: + projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloudExadataInfrastructure}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, a maximum of 50 db + servers will be returned. The maximum value is + 1000; values above 1000 will be reset to 1000. 
+ page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDbServersResponse(proto.Message): + r"""The response for ``DbServer.List``. + + Attributes: + db_servers (MutableSequence[google.cloud.oracledatabase_v1.types.DbServer]): + The list of database servers. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + db_servers: MutableSequence[db_server.DbServer] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=db_server.DbServer, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDbNodesRequest(proto.Message): + r"""The request for ``DbNode.List``. + + Attributes: + parent (str): + Required. The parent value for database node + in the following format: + projects/{project}/locations/{location}/cloudVmClusters/{cloudVmCluster}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 db nodes will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the node should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDbNodesResponse(proto.Message): + r"""The response for ``DbNode.List``. + + Attributes: + db_nodes (MutableSequence[google.cloud.oracledatabase_v1.types.DbNode]): + The list of DB Nodes + next_page_token (str): + A token identifying a page of results the + node should return. 
+ """ + + @property + def raw_page(self): + return self + + db_nodes: MutableSequence[db_node.DbNode] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=db_node.DbNode, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListGiVersionsRequest(proto.Message): + r"""The request for ``GiVersion.List``. + + Attributes: + parent (str): + Required. The parent value for Grid + Infrastructure Version in the following format: + Format: projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, a maximum of 50 Oracle + Grid Infrastructure (GI) versions will be + returned. The maximum value is 1000; values + above 1000 will be reset to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListGiVersionsResponse(proto.Message): + r"""The response for ``GiVersion.List``. + + Attributes: + gi_versions (MutableSequence[google.cloud.oracledatabase_v1.types.GiVersion]): + The list of Oracle Grid Infrastructure (GI) + versions. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + gi_versions: MutableSequence[gi_version.GiVersion] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gi_version.GiVersion, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListDbSystemShapesRequest(proto.Message): + r"""The request for ``DbSystemShape.List``. + + Attributes: + parent (str): + Required. The parent value for Database + System Shapes in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. 
If unspecified, at most 50 database + system shapes will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDbSystemShapesResponse(proto.Message): + r"""The response for ``DbSystemShape.List``. + + Attributes: + db_system_shapes (MutableSequence[google.cloud.oracledatabase_v1.types.DbSystemShape]): + The list of Database System shapes. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + db_system_shapes: MutableSequence[db_system_shape.DbSystemShape] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=db_system_shape.DbSystemShape, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. The status of the operation. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. 
API version used to start the + operation. + percent_complete (float): + Output only. An estimated percentage of the + operation that has been completed at a given + moment of time, between 0 and 100. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + percent_complete: float = proto.Field( + proto.DOUBLE, + number=8, + ) + + +class ListAutonomousDatabasesRequest(proto.Message): + r"""The request for ``AutonomousDatabase.List``. + + Attributes: + parent (str): + Required. The parent value for the Autonomous + Database in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Autonomous + Database will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. An expression for filtering the + results of the request. + order_by (str): + Optional. An expression for ordering the + results of the request. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListAutonomousDatabasesResponse(proto.Message): + r"""The response for ``AutonomousDatabase.List``. + + Attributes: + autonomous_databases (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDatabase]): + The list of Autonomous Databases. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + autonomous_databases: MutableSequence[gco_autonomous_database.AutonomousDatabase] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gco_autonomous_database.AutonomousDatabase, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetAutonomousDatabaseRequest(proto.Message): + r"""The request for ``AutonomousDatabase.Get``. + + Attributes: + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAutonomousDatabaseRequest(proto.Message): + r"""The request for ``AutonomousDatabase.Create``. + + Attributes: + parent (str): + Required. The name of the parent in the + following format: + projects/{project}/locations/{location}. + autonomous_database_id (str): + Required. The ID of the Autonomous Database to create. This + value is restricted to + (^`a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$) and must be a maximum + of 63 characters in length. The value must start with a + letter and end with a letter or a number. + autonomous_database (google.cloud.oracledatabase_v1.types.AutonomousDatabase): + Required. 
The Autonomous Database being + created. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + autonomous_database_id: str = proto.Field( + proto.STRING, + number=2, + ) + autonomous_database: gco_autonomous_database.AutonomousDatabase = proto.Field( + proto.MESSAGE, + number=3, + message=gco_autonomous_database.AutonomousDatabase, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteAutonomousDatabaseRequest(proto.Message): + r"""The request for ``AutonomousDatabase.Delete``. + + Attributes: + name (str): + Required. The name of the resource in the following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + request_id (str): + Optional. An optional ID to identify the + request. This value is used to identify + duplicate requests. If you make a request with + the same request ID and the original request is + still in progress or completed, the server + ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RestoreAutonomousDatabaseRequest(proto.Message): + r"""The request for ``AutonomousDatabase.Restore``. + + Attributes: + name (str): + Required. 
The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + restore_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The time and date to restore the + database to. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + restore_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class GenerateAutonomousDatabaseWalletRequest(proto.Message): + r"""The request for ``AutonomousDatabase.GenerateWallet``. + + Attributes: + name (str): + Required. The name of the Autonomous Database in the + following format: + projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}. + type_ (google.cloud.oracledatabase_v1.types.GenerateType): + Optional. The type of wallet generation for + the Autonomous Database. The default value is + SINGLE. + is_regional (bool): + Optional. True when requesting regional + connection strings in PDB connect info, + applicable to cross-region Data Guard only. + password (str): + Required. The password used to encrypt the + keys inside the wallet. The password must be a + minimum of 8 characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: gco_autonomous_database.GenerateType = proto.Field( + proto.ENUM, + number=2, + enum=gco_autonomous_database.GenerateType, + ) + is_regional: bool = proto.Field( + proto.BOOL, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GenerateAutonomousDatabaseWalletResponse(proto.Message): + r"""The response for ``AutonomousDatabase.GenerateWallet``. + + Attributes: + archive_content (bytes): + Output only. The base64 encoded wallet files. + """ + + archive_content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class ListAutonomousDbVersionsRequest(proto.Message): + r"""The request for ``AutonomousDbVersion.List``. 
+ + Attributes: + parent (str): + Required. The parent value for the Autonomous + Database in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Autonomous DB + Versions will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAutonomousDbVersionsResponse(proto.Message): + r"""The response for ``AutonomousDbVersion.List``. + + Attributes: + autonomous_db_versions (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDbVersion]): + The list of Autonomous Database versions. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + autonomous_db_versions: MutableSequence[autonomous_db_version.AutonomousDbVersion] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=autonomous_db_version.AutonomousDbVersion, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListAutonomousDatabaseCharacterSetsRequest(proto.Message): + r"""The request for ``AutonomousDatabaseCharacterSet.List``. + + Attributes: + parent (str): + Required. The parent value for the Autonomous + Database in the following format: + projects/{project}/locations/{location}. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Autonomous DB + Character Sets will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. 
An expression for filtering the results of the + request. Only the **character_set_type** field is supported + in the following format: + ``character_set_type="{characterSetType}"``. Accepted values + include ``DATABASE`` and ``NATIONAL``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListAutonomousDatabaseCharacterSetsResponse(proto.Message): + r"""The response for ``AutonomousDatabaseCharacterSet.List``. + + Attributes: + autonomous_database_character_sets (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDatabaseCharacterSet]): + The list of Autonomous Database Character + Sets. + next_page_token (str): + A token identifying a page of results the + server should return. + """ + + @property + def raw_page(self): + return self + + autonomous_database_character_sets: MutableSequence[autonomous_database_character_set.AutonomousDatabaseCharacterSet] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=autonomous_database_character_set.AutonomousDatabaseCharacterSet, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListAutonomousDatabaseBackupsRequest(proto.Message): + r"""The request for ``AutonomousDatabaseBackup.List``. + + Attributes: + parent (str): + Required. The parent value for + ListAutonomousDatabaseBackups in the following + format: projects/{project}/locations/{location}. + filter (str): + Optional. An expression for filtering the results of the + request. Only the **autonomous_database_id** field is + supported in the following format: + ``autonomous_database_id="{autonomous_database_id}"``. The + accepted values must be a valid Autonomous Database ID, + limited to the naming restrictions of the ID: + ^\ `a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$). 
The ID must start + with a letter, end with a letter or a number, and be a + maximum of 63 characters. + page_size (int): + Optional. The maximum number of items to + return. If unspecified, at most 50 Autonomous DB + Backups will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A token identifying a page of + results the server should return. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListAutonomousDatabaseBackupsResponse(proto.Message): + r"""The response for ``AutonomousDatabaseBackup.List``. + + Attributes: + autonomous_database_backups (MutableSequence[google.cloud.oracledatabase_v1.types.AutonomousDatabaseBackup]): + The list of Autonomous Database Backups. + next_page_token (str): + A token identifying a page of results the + server should return. 
+ """ + + @property + def raw_page(self): + return self + + autonomous_database_backups: MutableSequence[autonomous_db_backup.AutonomousDatabaseBackup] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=autonomous_db_backup.AutonomousDatabaseBackup, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py new file mode 100644 index 000000000000..3fe0ae3b740b --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py @@ -0,0 +1,438 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.oracledatabase.v1', + manifest={ + 'CloudVmCluster', + 'CloudVmClusterProperties', + 'DataCollectionOptions', + }, +) + + +class CloudVmCluster(proto.Message): + r"""Details of the Cloud VM Cluster resource. 
+ https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudVmCluster/ + + Attributes: + name (str): + Identifier. The name of the VM Cluster resource with the + format: + projects/{project}/locations/{region}/cloudVmClusters/{cloud_vm_cluster} + exadata_infrastructure (str): + Required. The name of the Exadata Infrastructure resource on + which VM cluster resource is created, in the following + format: + projects/{project}/locations/{region}/cloudExadataInfrastuctures/{cloud_extradata_infrastructure} + display_name (str): + Optional. User friendly name for this + resource. + gcp_oracle_zone (str): + Output only. Google Cloud Platform location + where Oracle Exadata is hosted. It is same as + Google Cloud Platform Oracle zone of Exadata + infrastructure. + properties (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties): + Optional. Various properties of the VM + Cluster. + labels (MutableMapping[str, str]): + Optional. Labels or tags associated with the + VM Cluster. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The date and time that the VM + cluster was created. + cidr (str): + Required. Network settings. CIDR to use for + cluster IP allocation. + backup_subnet_cidr (str): + Required. CIDR range of the backup subnet. + network (str): + Required. The name of the VPC network. 
+ Format: + projects/{project}/global/networks/{network} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + exadata_infrastructure: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + gcp_oracle_zone: str = proto.Field( + proto.STRING, + number=12, + ) + properties: 'CloudVmClusterProperties' = proto.Field( + proto.MESSAGE, + number=6, + message='CloudVmClusterProperties', + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + cidr: str = proto.Field( + proto.STRING, + number=9, + ) + backup_subnet_cidr: str = proto.Field( + proto.STRING, + number=10, + ) + network: str = proto.Field( + proto.STRING, + number=11, + ) + + +class CloudVmClusterProperties(proto.Message): + r"""Various properties and settings associated with Exadata VM + cluster. + + Attributes: + ocid (str): + Output only. Oracle Cloud Infrastructure ID + of VM Cluster. + license_type (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties.LicenseType): + Required. License type of VM Cluster. + gi_version (str): + Optional. Grid Infrastructure Version. + time_zone (google.type.datetime_pb2.TimeZone): + Optional. Time zone of VM Cluster to set. + Defaults to UTC if not specified. + ssh_public_keys (MutableSequence[str]): + Optional. SSH public keys to be stored with + cluster. + node_count (int): + Optional. Number of database servers. + shape (str): + Output only. Shape of VM Cluster. + ocpu_count (float): + Optional. OCPU count per VM. Minimum is 0.1. + memory_size_gb (int): + Optional. Memory allocated in GBs. + db_node_storage_size_gb (int): + Optional. Local storage per VM. + storage_size_gb (int): + Output only. The storage allocation for the + disk group, in gigabytes (GB). + data_storage_size_tb (float): + Optional. 
The data disk group size to be + allocated in TBs. + disk_redundancy (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties.DiskRedundancy): + Optional. The type of redundancy. + sparse_diskgroup_enabled (bool): + Optional. Use exadata sparse snapshots. + local_backup_enabled (bool): + Optional. Use local backup. + hostname_prefix (str): + Optional. Prefix for VM cluster host names. + diagnostics_data_collection_options (google.cloud.oracledatabase_v1.types.DataCollectionOptions): + Optional. Data collection options for + diagnostics. + state (google.cloud.oracledatabase_v1.types.CloudVmClusterProperties.State): + Output only. State of the cluster. + scan_listener_port_tcp (int): + Output only. SCAN listener port - TCP + scan_listener_port_tcp_ssl (int): + Output only. SCAN listener port - TLS + domain (str): + Output only. Parent DNS domain where SCAN DNS + and hosts names are qualified. ex: + ocispdelegated.ocisp10jvnet.oraclevcn.com + scan_dns (str): + Output only. SCAN DNS name. + ex: + sp2-yi0xq-scan.ocispdelegated.ocisp10jvnet.oraclevcn.com + hostname (str): + Output only. host name without domain. format: + "-" with some suffix. ex: sp2-yi0xq where + "sp2" is the hostname_prefix. + cpu_core_count (int): + Required. Number of enabled CPU cores. + system_version (str): + Output only. Operating system version of the + image. + scan_ip_ids (MutableSequence[str]): + Output only. OCIDs of scan IPs. + scan_dns_record_id (str): + Output only. OCID of scan DNS record. + oci_url (str): + Output only. Deep link to the OCI console to + view this resource. + db_server_ocids (MutableSequence[str]): + Optional. OCID of database servers. + compartment_id (str): + Output only. Compartment ID of cluster. + dns_listener_ip (str): + Output only. DNS listener IP. + cluster_name (str): + Optional. OCI Cluster name. + """ + class LicenseType(proto.Enum): + r"""Different licenses supported. 
+ + Values: + LICENSE_TYPE_UNSPECIFIED (0): + Unspecified + LICENSE_INCLUDED (1): + License included part of offer + BRING_YOUR_OWN_LICENSE (2): + Bring your own license + """ + LICENSE_TYPE_UNSPECIFIED = 0 + LICENSE_INCLUDED = 1 + BRING_YOUR_OWN_LICENSE = 2 + + class DiskRedundancy(proto.Enum): + r"""Types of disk redundancy provided by Oracle. + + Values: + DISK_REDUNDANCY_UNSPECIFIED (0): + Unspecified. + HIGH (1): + High - 3 way mirror. + NORMAL (2): + Normal - 2 way mirror. + """ + DISK_REDUNDANCY_UNSPECIFIED = 0 + HIGH = 1 + NORMAL = 2 + + class State(proto.Enum): + r"""The various lifecycle states of the VM cluster. + + Values: + STATE_UNSPECIFIED (0): + Default unspecified value. + PROVISIONING (1): + Indicates that the resource is in + provisioning state. + AVAILABLE (2): + Indicates that the resource is in available + state. + UPDATING (3): + Indicates that the resource is in updating + state. + TERMINATING (4): + Indicates that the resource is in terminating + state. + TERMINATED (5): + Indicates that the resource is in terminated + state. + FAILED (6): + Indicates that the resource is in failed + state. + MAINTENANCE_IN_PROGRESS (7): + Indicates that the resource is in maintenance + in progress state. 
+ """ + STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + AVAILABLE = 2 + UPDATING = 3 + TERMINATING = 4 + TERMINATED = 5 + FAILED = 6 + MAINTENANCE_IN_PROGRESS = 7 + + ocid: str = proto.Field( + proto.STRING, + number=1, + ) + license_type: LicenseType = proto.Field( + proto.ENUM, + number=2, + enum=LicenseType, + ) + gi_version: str = proto.Field( + proto.STRING, + number=3, + ) + time_zone: datetime_pb2.TimeZone = proto.Field( + proto.MESSAGE, + number=4, + message=datetime_pb2.TimeZone, + ) + ssh_public_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + node_count: int = proto.Field( + proto.INT32, + number=6, + ) + shape: str = proto.Field( + proto.STRING, + number=7, + ) + ocpu_count: float = proto.Field( + proto.FLOAT, + number=8, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=9, + ) + db_node_storage_size_gb: int = proto.Field( + proto.INT32, + number=10, + ) + storage_size_gb: int = proto.Field( + proto.INT32, + number=11, + ) + data_storage_size_tb: float = proto.Field( + proto.DOUBLE, + number=12, + ) + disk_redundancy: DiskRedundancy = proto.Field( + proto.ENUM, + number=13, + enum=DiskRedundancy, + ) + sparse_diskgroup_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) + local_backup_enabled: bool = proto.Field( + proto.BOOL, + number=15, + ) + hostname_prefix: str = proto.Field( + proto.STRING, + number=16, + ) + diagnostics_data_collection_options: 'DataCollectionOptions' = proto.Field( + proto.MESSAGE, + number=19, + message='DataCollectionOptions', + ) + state: State = proto.Field( + proto.ENUM, + number=20, + enum=State, + ) + scan_listener_port_tcp: int = proto.Field( + proto.INT32, + number=21, + ) + scan_listener_port_tcp_ssl: int = proto.Field( + proto.INT32, + number=22, + ) + domain: str = proto.Field( + proto.STRING, + number=23, + ) + scan_dns: str = proto.Field( + proto.STRING, + number=24, + ) + hostname: str = proto.Field( + proto.STRING, + number=25, + ) + cpu_core_count: int = 
proto.Field( + proto.INT32, + number=26, + ) + system_version: str = proto.Field( + proto.STRING, + number=27, + ) + scan_ip_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) + scan_dns_record_id: str = proto.Field( + proto.STRING, + number=29, + ) + oci_url: str = proto.Field( + proto.STRING, + number=30, + ) + db_server_ocids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=31, + ) + compartment_id: str = proto.Field( + proto.STRING, + number=32, + ) + dns_listener_ip: str = proto.Field( + proto.STRING, + number=35, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=36, + ) + + +class DataCollectionOptions(proto.Message): + r"""Data collection options for diagnostics. + + Attributes: + diagnostics_events_enabled (bool): + Optional. Indicates whether diagnostic + collection is enabled for the VM cluster + health_monitoring_enabled (bool): + Optional. Indicates whether health monitoring + is enabled for the VM cluster + incident_logs_enabled (bool): + Optional. 
Indicates whether incident logs and + trace collection are enabled for the VM cluster + """ + + diagnostics_events_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + health_monitoring_enabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + incident_logs_enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/mypy.ini b/owl-bot-staging/google-cloud-oracledatabase/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py b/owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py new file mode 100644 index 000000000000..f7f7ff3c7bf6 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py @@ -0,0 +1,278 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12" +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-oracledatabase' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.12" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/oracledatabase_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/oracledatabase_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py new file mode 100644 index 000000000000..8812689ffc11 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_CreateAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_create_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + autonomous_database = oracledatabase_v1.AutonomousDatabase() + autonomous_database.network = "network_value" + autonomous_database.cidr = "cidr_value" + + request = oracledatabase_v1.CreateAutonomousDatabaseRequest( + parent="parent_value", + autonomous_database_id="autonomous_database_id_value", + autonomous_database=autonomous_database, + ) + + # Make the request + operation = client.create_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_CreateAutonomousDatabase_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py new file mode 100644 index 000000000000..298e7b566d84 --- /dev/null +++ 
b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCloudExadataInfrastructure +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_CreateCloudExadataInfrastructure_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_create_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.CreateCloudExadataInfrastructureRequest( + parent="parent_value", + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", + ) + + # Make the request + operation = client.create_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_CreateCloudExadataInfrastructure_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py new file mode 100644 index 000000000000..4f172f9ce515 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCloudVmCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_CreateCloudVmCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_create_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + cloud_vm_cluster = oracledatabase_v1.CloudVmCluster() + cloud_vm_cluster.exadata_infrastructure = "exadata_infrastructure_value" + cloud_vm_cluster.cidr = "cidr_value" + cloud_vm_cluster.backup_subnet_cidr = "backup_subnet_cidr_value" + cloud_vm_cluster.network = "network_value" + + request = oracledatabase_v1.CreateCloudVmClusterRequest( + parent="parent_value", + cloud_vm_cluster_id="cloud_vm_cluster_id_value", + cloud_vm_cluster=cloud_vm_cluster, + ) + + # Make the request + operation = client.create_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_CreateCloudVmCluster_sync] diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py new file mode 100644 index 000000000000..44f792f2869d --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_DeleteAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_delete_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_DeleteAutonomousDatabase_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py new file mode 100644 index 000000000000..bbbc8441a482 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteCloudExadataInfrastructure +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_DeleteCloudExadataInfrastructure_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_delete_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_exadata_infrastructure(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_DeleteCloudExadataInfrastructure_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py new file mode 100644 index 000000000000..42bdd4641c93 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py @@ -0,0 +1,56 @@ +# -*- 
coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCloudVmCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_DeleteCloudVmCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_delete_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.DeleteCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_cloud_vm_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_DeleteCloudVmCluster_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py new file mode 100644 index 000000000000..3f2cdc2849ae --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GenerateAutonomousDatabaseWallet +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GenerateAutonomousDatabaseWallet_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_generate_autonomous_database_wallet(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GenerateAutonomousDatabaseWalletRequest( + name="name_value", + password="password_value", + ) + + # Make the request + response = client.generate_autonomous_database_wallet(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GenerateAutonomousDatabaseWallet_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py new file mode 100644 index 000000000000..6273b25ca4e7 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GetAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_get_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_autonomous_database(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GetAutonomousDatabase_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py new file mode 100644 index 000000000000..abbfae94a61b --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetCloudExadataInfrastructure +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GetCloudExadataInfrastructure_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_get_cloud_exadata_infrastructure(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudExadataInfrastructureRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_exadata_infrastructure(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GetCloudExadataInfrastructure_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py new file mode 100644 index 000000000000..76e6c851c122 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCloudVmCluster +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_GetCloudVmCluster_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_get_cloud_vm_cluster(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.GetCloudVmClusterRequest( + name="name_value", + ) + + # Make the request + response = client.get_cloud_vm_cluster(request=request) + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_GetCloudVmCluster_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py new file mode 100644 index 000000000000..217be56f9f1e --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListAutonomousDatabaseBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_database_backups(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseBackups_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py new file mode 100644 index 000000000000..d33d179e389a --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- 
+# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAutonomousDatabaseCharacterSets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseCharacterSets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_database_character_sets(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabaseCharacterSetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_database_character_sets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseCharacterSets_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py new file mode 100644 index 000000000000..4153a3ad56f5 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListAutonomousDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabases_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_databases(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabases_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py new file mode 100644 index 000000000000..3ffeb9c12ee1 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAutonomousDbVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListAutonomousDbVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_autonomous_db_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListAutonomousDbVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autonomous_db_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListAutonomousDbVersions_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py new file mode 100644 index 000000000000..9fa96f7b3216 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListCloudExadataInfrastructures +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListCloudExadataInfrastructures_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_cloud_exadata_infrastructures(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudExadataInfrastructuresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_exadata_infrastructures(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListCloudExadataInfrastructures_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py new file mode 100644 index 000000000000..4d768bee445f --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCloudVmClusters +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListCloudVmClusters_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_cloud_vm_clusters(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListCloudVmClustersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_cloud_vm_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListCloudVmClusters_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py new file mode 100644 index 000000000000..5aeb00470993 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDbNodes +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListDbNodes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_db_nodes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbNodesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_nodes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListDbNodes_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py new file mode 100644 index 000000000000..7daf125eb879 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDbServers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListDbServers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_db_servers(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbServersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_servers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListDbServers_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py new file mode 100644 index 000000000000..851a38768000 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDbSystemShapes +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListDbSystemShapes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_db_system_shapes(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListDbSystemShapesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_db_system_shapes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListDbSystemShapes_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py new file mode 100644 index 000000000000..cf7ff1ebdfea --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntitlements +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListEntitlements_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_entitlements(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListEntitlementsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entitlements(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListEntitlements_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py new file mode 100644 index 000000000000..23cd229e7552 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGiVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_ListGiVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_list_gi_versions(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.ListGiVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gi_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_ListGiVersions_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py new file mode 100644 index 000000000000..d0e6cf445d21 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreAutonomousDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-oracledatabase + + +# [START oracledatabase_v1_generated_OracleDatabase_RestoreAutonomousDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import oracledatabase_v1 + + +def sample_restore_autonomous_database(): + # Create a client + client = oracledatabase_v1.OracleDatabaseClient() + + # Initialize request argument(s) + request = oracledatabase_v1.RestoreAutonomousDatabaseRequest( + name="name_value", + ) + + # Make the request + operation = client.restore_autonomous_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END oracledatabase_v1_generated_OracleDatabase_RestoreAutonomousDatabase_sync] diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json new file mode 100644 index 000000000000..f8cf3fba2f4a --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json @@ -0,0 +1,1815 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.oracledatabase.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-oracledatabase", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.create_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.CreateAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "CreateAutonomousDatabase" + }, 
+ "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.CreateAutonomousDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "autonomous_database", + "type": "google.cloud.oracledatabase_v1.types.AutonomousDatabase" + }, + { + "name": "autonomous_database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_autonomous_database" + }, + "description": "Sample for CreateAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_CreateAutonomousDatabase_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.create_cloud_exadata_infrastructure", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.CreateCloudExadataInfrastructure", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "CreateCloudExadataInfrastructure" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.CreateCloudExadataInfrastructureRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cloud_exadata_infrastructure", + "type": "google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure" + }, + { + "name": "cloud_exadata_infrastructure_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cloud_exadata_infrastructure" + }, + "description": "Sample for CreateCloudExadataInfrastructure", + "file": "oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_CreateCloudExadataInfrastructure_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.create_cloud_vm_cluster", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.CreateCloudVmCluster", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": 
"CreateCloudVmCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.CreateCloudVmClusterRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "cloud_vm_cluster", + "type": "google.cloud.oracledatabase_v1.types.CloudVmCluster" + }, + { + "name": "cloud_vm_cluster_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cloud_vm_cluster" + }, + "description": "Sample for CreateCloudVmCluster", + "file": "oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_CreateCloudVmCluster_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.delete_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.DeleteAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "DeleteAutonomousDatabase" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.oracledatabase_v1.types.DeleteAutonomousDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_autonomous_database" + }, + "description": "Sample for DeleteAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_DeleteAutonomousDatabase_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.delete_cloud_exadata_infrastructure", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.DeleteCloudExadataInfrastructure", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "DeleteCloudExadataInfrastructure" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.DeleteCloudExadataInfrastructureRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cloud_exadata_infrastructure" + }, + "description": "Sample for DeleteCloudExadataInfrastructure", + "file": "oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_DeleteCloudExadataInfrastructure_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.delete_cloud_vm_cluster", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.DeleteCloudVmCluster", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "DeleteCloudVmCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.DeleteCloudVmClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "delete_cloud_vm_cluster" + }, + "description": "Sample for DeleteCloudVmCluster", + "file": "oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_DeleteCloudVmCluster_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.generate_autonomous_database_wallet", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GenerateAutonomousDatabaseWallet", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GenerateAutonomousDatabaseWallet" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "type_", + "type": "google.cloud.oracledatabase_v1.types.GenerateType" + }, + { + "name": "is_regional", + "type": "bool" + }, + { + "name": "password", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.oracledatabase_v1.types.GenerateAutonomousDatabaseWalletResponse", + "shortName": "generate_autonomous_database_wallet" + }, + "description": "Sample for GenerateAutonomousDatabaseWallet", + "file": "oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GenerateAutonomousDatabaseWallet_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.get_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GetAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GetAutonomousDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GetAutonomousDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.AutonomousDatabase", + "shortName": "get_autonomous_database" + }, + "description": "Sample for 
GetAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GetAutonomousDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.get_cloud_exadata_infrastructure", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GetCloudExadataInfrastructure", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GetCloudExadataInfrastructure" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GetCloudExadataInfrastructureRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.CloudExadataInfrastructure", + "shortName": "get_cloud_exadata_infrastructure" + }, + "description": "Sample for GetCloudExadataInfrastructure", + "file": "oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GetCloudExadataInfrastructure_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.get_cloud_vm_cluster", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.GetCloudVmCluster", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "GetCloudVmCluster" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.GetCloudVmClusterRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.types.CloudVmCluster", + "shortName": "get_cloud_vm_cluster" + }, + "description": "Sample for GetCloudVmCluster", + "file": "oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_GetCloudVmCluster_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, 
+ { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_database_backups", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDatabaseBackups", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDatabaseBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseBackupsPager", + "shortName": "list_autonomous_database_backups" + }, + "description": "Sample for ListAutonomousDatabaseBackups", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_database_character_sets", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDatabaseCharacterSets", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDatabaseCharacterSets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDatabaseCharacterSetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabaseCharacterSetsPager", + "shortName": "list_autonomous_database_character_sets" + }, + "description": "Sample for ListAutonomousDatabaseCharacterSets", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabaseCharacterSets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_databases", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDatabases", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDatabasesPager", + "shortName": "list_autonomous_databases" + }, + "description": "Sample for ListAutonomousDatabases", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDatabases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_autonomous_db_versions", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListAutonomousDbVersions", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListAutonomousDbVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListAutonomousDbVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListAutonomousDbVersionsPager", + "shortName": "list_autonomous_db_versions" + }, + "description": "Sample for ListAutonomousDbVersions", + "file": "oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListAutonomousDbVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_cloud_exadata_infrastructures", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListCloudExadataInfrastructures", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListCloudExadataInfrastructures" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListCloudExadataInfrastructuresRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudExadataInfrastructuresPager", + "shortName": "list_cloud_exadata_infrastructures" + }, + "description": "Sample for ListCloudExadataInfrastructures", + "file": "oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListCloudExadataInfrastructures_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_cloud_vm_clusters", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListCloudVmClusters", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListCloudVmClusters" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListCloudVmClustersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListCloudVmClustersPager", + "shortName": "list_cloud_vm_clusters" + }, + "description": "Sample for ListCloudVmClusters", + "file": "oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListCloudVmClusters_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_db_nodes", + "method": { 
+ "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListDbNodes", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListDbNodes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListDbNodesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbNodesPager", + "shortName": "list_db_nodes" + }, + "description": "Sample for ListDbNodes", + "file": "oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListDbNodes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_db_servers", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListDbServers", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListDbServers" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.oracledatabase_v1.types.ListDbServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbServersPager", + "shortName": "list_db_servers" + }, + "description": "Sample for ListDbServers", + "file": "oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListDbServers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_db_system_shapes", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListDbSystemShapes", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListDbSystemShapes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListDbSystemShapesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { 
+ "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListDbSystemShapesPager", + "shortName": "list_db_system_shapes" + }, + "description": "Sample for ListDbSystemShapes", + "file": "oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListDbSystemShapes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_entitlements", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListEntitlements", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListEntitlements" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListEntitlementsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListEntitlementsPager", + "shortName": "list_entitlements" + }, + 
"description": "Sample for ListEntitlements", + "file": "oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_ListEntitlements_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.list_gi_versions", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.ListGiVersions", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "ListGiVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.ListGiVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.oracledatabase_v1.services.oracle_database.pagers.ListGiVersionsPager", + "shortName": "list_gi_versions" + }, + "description": "Sample for ListGiVersions", + "file": "oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"oracledatabase_v1_generated_OracleDatabase_ListGiVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient", + "shortName": "OracleDatabaseClient" + }, + "fullName": "google.cloud.oracledatabase_v1.OracleDatabaseClient.restore_autonomous_database", + "method": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase.RestoreAutonomousDatabase", + "service": { + "fullName": "google.cloud.oracledatabase.v1.OracleDatabase", + "shortName": "OracleDatabase" + }, + "shortName": "RestoreAutonomousDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.oracledatabase_v1.types.RestoreAutonomousDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "restore_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_autonomous_database" + }, + "description": "Sample for RestoreAutonomousDatabase", + "file": "oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "oracledatabase_v1_generated_OracleDatabase_RestoreAutonomousDatabase_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": 
"FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/scripts/fixup_oracledatabase_v1_keywords.py b/owl-bot-staging/google-cloud-oracledatabase/v1/scripts/fixup_oracledatabase_v1_keywords.py new file mode 100644 index 000000000000..177c56933878 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/scripts/fixup_oracledatabase_v1_keywords.py @@ -0,0 +1,197 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
"""Rewrite flattened positional GAPIC method calls into ``request=``-dict form.

Walks an input directory of Python sources, and for every call to a known
``oracledatabase`` client method converts positional/keyword "flattened"
arguments into a single ``request`` dictionary argument, copying the result
tree to an output directory. The originals are never modified in place.
"""
import argparse
import os
import pathlib
import sys
from typing import Any, Callable, Dict, List, Sequence, Tuple

import libcst as cst  # third-party: concrete syntax tree with round-trip fidelity


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any],
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Returns:
        (true_list, false_list): items for which ``predicate`` is truthy,
        then those for which it is falsy, each in original order.
    """
    results: Tuple[List[Any], List[Any]] = ([], [])

    for i in iterator:
        # bool -> int indexing: False items land in results[0], True in results[1].
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class oracledatabaseCallTransformer(cst.CSTTransformer):
    """CST transformer that folds flattened call args into a ``request`` dict."""

    # Control-plane parameters that must remain top-level keyword arguments.
    # NOTE: Tuple[str, ...] (not Tuple[str]) — these are variable-length tuples.
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    # Declared keyword-parameter order for every API method we know how to fix.
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'create_autonomous_database': ('parent', 'autonomous_database_id', 'autonomous_database', 'request_id', ),
        'create_cloud_exadata_infrastructure': ('parent', 'cloud_exadata_infrastructure_id', 'cloud_exadata_infrastructure', 'request_id', ),
        'create_cloud_vm_cluster': ('parent', 'cloud_vm_cluster_id', 'cloud_vm_cluster', 'request_id', ),
        'delete_autonomous_database': ('name', 'request_id', ),
        'delete_cloud_exadata_infrastructure': ('name', 'request_id', 'force', ),
        'delete_cloud_vm_cluster': ('name', 'request_id', 'force', ),
        'generate_autonomous_database_wallet': ('name', 'password', 'type_', 'is_regional', ),
        'get_autonomous_database': ('name', ),
        'get_cloud_exadata_infrastructure': ('name', ),
        'get_cloud_vm_cluster': ('name', ),
        'list_autonomous_database_backups': ('parent', 'filter', 'page_size', 'page_token', ),
        'list_autonomous_database_character_sets': ('parent', 'page_size', 'page_token', 'filter', ),
        'list_autonomous_databases': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
        'list_autonomous_db_versions': ('parent', 'page_size', 'page_token', ),
        'list_cloud_exadata_infrastructures': ('parent', 'page_size', 'page_token', ),
        'list_cloud_vm_clusters': ('parent', 'page_size', 'page_token', 'filter', ),
        'list_db_nodes': ('parent', 'page_size', 'page_token', ),
        'list_db_servers': ('parent', 'page_size', 'page_token', ),
        'list_db_system_shapes': ('parent', 'page_size', 'page_token', ),
        'list_entitlements': ('parent', 'page_size', 'page_token', ),
        'list_gi_versions': ('parent', 'page_size', 'page_token', ),
        'restore_autonomous_database': ('name', 'restore_time', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Fold a recognized flattened call's args into one ``request`` dict arg."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the declared params must be control params
        # passed positionally; rebind them to their keyword names.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=oracledatabaseCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        # Python source defaults to UTF-8 (PEP 3120); read/write it explicitly
        # so results don't depend on the platform's locale encoding.
        with open(fpath, 'r', encoding='utf-8') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w', encoding='utf-8') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the oracledatabase client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
#
"""Build script for the ``google-cloud-oracledatabase`` distribution package.

Reads the package version out of ``gapic_version.py``, the long description
from ``README.rst``, and declares the GAPIC runtime dependencies.
"""
import io
import os
import re

import setuptools  # type: ignore

package_root = os.path.abspath(os.path.dirname(__file__))

name = 'google-cloud-oracledatabase'


description = "Google Cloud Oracledatabase API client library"

version = None

with open(os.path.join(package_root, 'google/cloud/oracledatabase/gapic_version.py')) as fp:
    # Match a quoted semver-like string, e.g. "1.2.3". The dots are escaped:
    # a bare '.' matches any character, so the unescaped pattern would also
    # have accepted malformed strings such as "1a2b3".
    version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read())
    assert (len(version_candidates) == 1)
    version = version_candidates[0]

# 0.x releases are beta; anything else is considered stable.
if version[0] == "0":
    release_status = "Development Status :: 4 - Beta"
else:
    release_status = "Development Status :: 5 - Production/Stable"

dependencies = [
    "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
    # Exclude incompatible versions of `google-auth`
    # See https://github.com/googleapis/google-cloud-python/issues/12364
    "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
    "proto-plus >= 1.22.3, <2.0.0dev",
    "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
]
url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase"

package_root = os.path.abspath(os.path.dirname(__file__))

readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
    readme = readme_file.read()

# Only ship the google.* namespace packages; skip tests, docs, etc.
packages = [
    package
    for package in setuptools.find_namespace_packages()
    if package.startswith("google")
]

setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="googleapis-packages@google.com",
    license="Apache 2.0",
    url=url,
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    python_requires=">=3.7",
    install_requires=dependencies,
    include_package_data=True,
    zip_safe=False,
)
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py new file mode 100644 index 000000000000..7ad6633e7c25 --- /dev/null +++ b/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py @@ -0,0 +1,8504 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.cloud.oracledatabase_v1.services.oracle_database import OracleDatabaseClient +from google.cloud.oracledatabase_v1.services.oracle_database import pagers +from google.cloud.oracledatabase_v1.services.oracle_database import transports +from google.cloud.oracledatabase_v1.types import autonomous_database +from google.cloud.oracledatabase_v1.types import autonomous_database as gco_autonomous_database +from google.cloud.oracledatabase_v1.types import autonomous_database_character_set +from google.cloud.oracledatabase_v1.types import autonomous_db_backup +from google.cloud.oracledatabase_v1.types import autonomous_db_version 
+from google.cloud.oracledatabase_v1.types import common +from google.cloud.oracledatabase_v1.types import db_node +from google.cloud.oracledatabase_v1.types import db_server +from google.cloud.oracledatabase_v1.types import db_system_shape +from google.cloud.oracledatabase_v1.types import entitlement +from google.cloud.oracledatabase_v1.types import exadata_infra +from google.cloud.oracledatabase_v1.types import gi_version +from google.cloud.oracledatabase_v1.types import oracledatabase +from google.cloud.oracledatabase_v1.types import vm_cluster +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert OracleDatabaseClient._get_default_mtls_endpoint(None) is None + assert OracleDatabaseClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert OracleDatabaseClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert OracleDatabaseClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert OracleDatabaseClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert OracleDatabaseClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert OracleDatabaseClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + OracleDatabaseClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert OracleDatabaseClient._read_environment_variables() == (False, "never", None) + + with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert OracleDatabaseClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + OracleDatabaseClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert OracleDatabaseClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert OracleDatabaseClient._get_client_cert_source(None, False) is None + assert OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert OracleDatabaseClient._get_client_cert_source(None, True) is mock_default_cert_source + assert OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + default_endpoint = 
OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert OracleDatabaseClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert OracleDatabaseClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + assert OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "always") == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + assert OracleDatabaseClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + assert OracleDatabaseClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + OracleDatabaseClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert OracleDatabaseClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert OracleDatabaseClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert OracleDatabaseClient._get_universe_domain(None, None) == OracleDatabaseClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + OracleDatabaseClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. 
+ client = client_class( + transport=transport_class(credentials=credentials) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize("client_class,transport_name", [ + (OracleDatabaseClient, "rest"), +]) +def test_oracle_database_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'oracledatabase.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://oracledatabase.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.OracleDatabaseRestTransport, "rest"), +]) +def test_oracle_database_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (OracleDatabaseClient, "rest"), +]) +def test_oracle_database_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 
'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'oracledatabase.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://oracledatabase.googleapis.com' + ) + + +def test_oracle_database_client_get_transport_class(): + transport = OracleDatabaseClient.get_transport_class() + available_transports = [ + transports.OracleDatabaseRestTransport, + ] + assert transport in available_transports + + transport = OracleDatabaseClient.get_transport_class("rest") + assert transport == transports.OracleDatabaseRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), +]) +@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) +def test_oracle_database_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(OracleDatabaseClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(OracleDatabaseClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + 
api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "true"), + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "false"), +]) +@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_oracle_database_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + OracleDatabaseClient +]) +@mock.patch.object(OracleDatabaseClient, "DEFAULT_ENDPOINT", modify_default_endpoint(OracleDatabaseClient)) +def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + OracleDatabaseClient +]) +@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) +def test_oracle_database_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE + default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and 
GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), +]) +def test_oracle_database_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", None), +]) +def test_oracle_database_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListCloudExadataInfrastructuresRequest, + dict, +]) +def test_list_cloud_exadata_infrastructures_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_cloud_exadata_infrastructures(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCloudExadataInfrastructuresPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_cloud_exadata_infrastructures_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_cloud_exadata_infrastructures in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.list_cloud_exadata_infrastructures] = mock_rpc + + request = {} + client.list_cloud_exadata_infrastructures(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_cloud_exadata_infrastructures(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_cloud_exadata_infrastructures_rest_required_fields(request_type=oracledatabase.ListCloudExadataInfrastructuresRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_cloud_exadata_infrastructures(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_cloud_exadata_infrastructures_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_cloud_exadata_infrastructures._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_cloud_exadata_infrastructures_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_cloud_exadata_infrastructures") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_cloud_exadata_infrastructures") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
oracledatabase.ListCloudExadataInfrastructuresRequest.pb(oracledatabase.ListCloudExadataInfrastructuresRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(oracledatabase.ListCloudExadataInfrastructuresResponse()) + + request = oracledatabase.ListCloudExadataInfrastructuresRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() + + client.list_cloud_exadata_infrastructures(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_cloud_exadata_infrastructures_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListCloudExadataInfrastructuresRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_cloud_exadata_infrastructures(request) + + +def test_list_cloud_exadata_infrastructures_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_cloud_exadata_infrastructures(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" % client.transport._host, args[1]) + + +def test_list_cloud_exadata_infrastructures_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_cloud_exadata_infrastructures( + oracledatabase.ListCloudExadataInfrastructuresRequest(), + parent='parent_value', + ) + + +def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[ + exadata_infra.CloudExadataInfrastructure(), + exadata_infra.CloudExadataInfrastructure(), + exadata_infra.CloudExadataInfrastructure(), + ], + next_page_token='abc', + ), + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[], + next_page_token='def', + ), + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[ + exadata_infra.CloudExadataInfrastructure(), + ], + next_page_token='ghi', + ), + oracledatabase.ListCloudExadataInfrastructuresResponse( + cloud_exadata_infrastructures=[ + exadata_infra.CloudExadataInfrastructure(), + exadata_infra.CloudExadataInfrastructure(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_cloud_exadata_infrastructures(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, exadata_infra.CloudExadataInfrastructure) + for i in results) + + pages = list(client.list_cloud_exadata_infrastructures(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.GetCloudExadataInfrastructureRequest, + dict, +]) +def 
test_get_cloud_exadata_infrastructure_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = exadata_infra.CloudExadataInfrastructure( + name='name_value', + display_name='display_name_value', + gcp_oracle_zone='gcp_oracle_zone_value', + entitlement_id='entitlement_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_cloud_exadata_infrastructure(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, exadata_infra.CloudExadataInfrastructure) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.gcp_oracle_zone == 'gcp_oracle_zone_value' + assert response.entitlement_id == 'entitlement_id_value' + +def test_get_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cloud_exadata_infrastructure in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_cloud_exadata_infrastructure] = mock_rpc + + request = {} + client.get_cloud_exadata_infrastructure(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_cloud_exadata_infrastructure(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cloud_exadata_infrastructure_rest_required_fields(request_type=oracledatabase.GetCloudExadataInfrastructureRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = exadata_infra.CloudExadataInfrastructure() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_cloud_exadata_infrastructure(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_cloud_exadata_infrastructure_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_cloud_exadata_infrastructure._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_get_cloud_exadata_infrastructure") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, 
"pre_get_cloud_exadata_infrastructure") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GetCloudExadataInfrastructureRequest.pb(oracledatabase.GetCloudExadataInfrastructureRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = exadata_infra.CloudExadataInfrastructure.to_json(exadata_infra.CloudExadataInfrastructure()) + + request = oracledatabase.GetCloudExadataInfrastructureRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = exadata_infra.CloudExadataInfrastructure() + + client.get_cloud_exadata_infrastructure(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cloud_exadata_infrastructure_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GetCloudExadataInfrastructureRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cloud_exadata_infrastructure(request) + + +def test_get_cloud_exadata_infrastructure_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = exadata_infra.CloudExadataInfrastructure() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_cloud_exadata_infrastructure(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" % client.transport._host, args[1]) + + +def test_get_cloud_exadata_infrastructure_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cloud_exadata_infrastructure( + oracledatabase.GetCloudExadataInfrastructureRequest(), + name='name_value', + ) + + +def test_get_cloud_exadata_infrastructure_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.CreateCloudExadataInfrastructureRequest, + dict, +]) +def test_create_cloud_exadata_infrastructure_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["cloud_exadata_infrastructure"] = {'name': 'name_value', 'display_name': 'display_name_value', 'gcp_oracle_zone': 'gcp_oracle_zone_value', 'entitlement_id': 'entitlement_id_value', 'properties': {'ocid': 'ocid_value', 'compute_count': 1413, 'storage_count': 1405, 'total_storage_size_gb': 2234, 'available_storage_size_gb': 2615, 'maintenance_window': {'preference': 1, 'months': [1], 'weeks_of_month': [1497, 1498], 'days_of_week': [1], 'hours_of_day': [1283, 1284], 'lead_time_week': 1455, 'patching_mode': 1, 'custom_action_timeout_mins': 2804, 'is_custom_action_timeout_enabled': True}, 'state': 1, 'shape': 'shape_value', 'oci_url': 'oci_url_value', 'cpu_count': 976, 'max_cpu_count': 1397, 'memory_size_gb': 1499, 'max_memory_gb': 
1382, 'db_node_storage_size_gb': 2401, 'max_db_node_storage_size_gb': 2822, 'data_storage_size_tb': 0.2109, 'max_data_storage_tb': 0.19920000000000002, 'activated_storage_count': 2449, 'additional_storage_count': 2549, 'db_server_version': 'db_server_version_value', 'storage_server_version': 'storage_server_version_value', 'next_maintenance_run_id': 'next_maintenance_run_id_value', 'next_maintenance_run_time': {'seconds': 751, 'nanos': 543}, 'next_security_maintenance_run_time': {}, 'customer_contacts': [{'email': 'email_value'}], 'monthly_storage_server_version': 'monthly_storage_server_version_value', 'monthly_db_server_version': 'monthly_db_server_version_value'}, 'labels': {}, 'create_time': {}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = oracledatabase.CreateCloudExadataInfrastructureRequest.meta.fields["cloud_exadata_infrastructure"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cloud_exadata_infrastructure"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cloud_exadata_infrastructure"][field])): + del 
request_init["cloud_exadata_infrastructure"][field][i][subfield] + else: + del request_init["cloud_exadata_infrastructure"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_cloud_exadata_infrastructure(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + +def test_create_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_cloud_exadata_infrastructure in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_cloud_exadata_infrastructure] = mock_rpc + + request = {} + client.create_cloud_exadata_infrastructure(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cloud_exadata_infrastructure(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cloud_exadata_infrastructure_rest_required_fields(request_type=oracledatabase.CreateCloudExadataInfrastructureRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cloud_exadata_infrastructure_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "cloudExadataInfrastructureId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "cloudExadataInfrastructureId" in jsonified_request + assert jsonified_request["cloudExadataInfrastructureId"] == request_init["cloud_exadata_infrastructure_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["cloudExadataInfrastructureId"] = 'cloud_exadata_infrastructure_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("cloud_exadata_infrastructure_id", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "cloudExadataInfrastructureId" in jsonified_request + assert jsonified_request["cloudExadataInfrastructureId"] == 'cloud_exadata_infrastructure_id_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_cloud_exadata_infrastructure(request) + + expected_params = [ + ( + "cloudExadataInfrastructureId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_cloud_exadata_infrastructure_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_cloud_exadata_infrastructure._get_unset_required_fields({}) + assert set(unset_fields) == (set(("cloudExadataInfrastructureId", "requestId", )) & set(("parent", "cloudExadataInfrastructureId", "cloudExadataInfrastructure", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_create_cloud_exadata_infrastructure") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, 
"pre_create_cloud_exadata_infrastructure") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.CreateCloudExadataInfrastructureRequest.pb(oracledatabase.CreateCloudExadataInfrastructureRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = oracledatabase.CreateCloudExadataInfrastructureRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cloud_exadata_infrastructure(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cloud_exadata_infrastructure_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.CreateCloudExadataInfrastructureRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cloud_exadata_infrastructure(request) + + +def test_create_cloud_exadata_infrastructure_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure(name='name_value'), + cloud_exadata_infrastructure_id='cloud_exadata_infrastructure_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_cloud_exadata_infrastructure(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" % client.transport._host, args[1]) + + +def test_create_cloud_exadata_infrastructure_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cloud_exadata_infrastructure( + oracledatabase.CreateCloudExadataInfrastructureRequest(), + parent='parent_value', + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure(name='name_value'), + cloud_exadata_infrastructure_id='cloud_exadata_infrastructure_id_value', + ) + + +def test_create_cloud_exadata_infrastructure_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.DeleteCloudExadataInfrastructureRequest, + dict, +]) +def test_delete_cloud_exadata_infrastructure_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_cloud_exadata_infrastructure(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + +def test_delete_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_cloud_exadata_infrastructure in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_cloud_exadata_infrastructure] = mock_rpc + + request = {} + client.delete_cloud_exadata_infrastructure(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_cloud_exadata_infrastructure(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_cloud_exadata_infrastructure_rest_required_fields(request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_cloud_exadata_infrastructure(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_cloud_exadata_infrastructure_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_cloud_exadata_infrastructure._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force", "requestId", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_delete_cloud_exadata_infrastructure") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_exadata_infrastructure") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb(oracledatabase.DeleteCloudExadataInfrastructureRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = oracledatabase.DeleteCloudExadataInfrastructureRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cloud_exadata_infrastructure(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cloud_exadata_infrastructure_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cloud_exadata_infrastructure(request) + + +def test_delete_cloud_exadata_infrastructure_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_cloud_exadata_infrastructure(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" % client.transport._host, args[1]) + + +def test_delete_cloud_exadata_infrastructure_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cloud_exadata_infrastructure( + oracledatabase.DeleteCloudExadataInfrastructureRequest(), + name='name_value', + ) + + +def test_delete_cloud_exadata_infrastructure_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListCloudVmClustersRequest, + dict, +]) +def test_list_cloud_vm_clusters_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListCloudVmClustersResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_cloud_vm_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCloudVmClustersPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_cloud_vm_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_cloud_vm_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_cloud_vm_clusters] = mock_rpc + + request = {} + client.list_cloud_vm_clusters(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_cloud_vm_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_cloud_vm_clusters_rest_required_fields(request_type=oracledatabase.ListCloudVmClustersRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudVmClustersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_cloud_vm_clusters(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_cloud_vm_clusters_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_cloud_vm_clusters._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_cloud_vm_clusters_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_cloud_vm_clusters") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_cloud_vm_clusters") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListCloudVmClustersRequest.pb(oracledatabase.ListCloudVmClustersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListCloudVmClustersResponse.to_json(oracledatabase.ListCloudVmClustersResponse()) + + request = oracledatabase.ListCloudVmClustersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListCloudVmClustersResponse() + + client.list_cloud_vm_clusters(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_cloud_vm_clusters_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListCloudVmClustersRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_cloud_vm_clusters(request) + + +def test_list_cloud_vm_clusters_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListCloudVmClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_cloud_vm_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" % client.transport._host, args[1]) + + +def test_list_cloud_vm_clusters_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_cloud_vm_clusters( + oracledatabase.ListCloudVmClustersRequest(), + parent='parent_value', + ) + + +def test_list_cloud_vm_clusters_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[ + vm_cluster.CloudVmCluster(), + vm_cluster.CloudVmCluster(), + vm_cluster.CloudVmCluster(), + ], + next_page_token='abc', + ), + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[], + next_page_token='def', + ), + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[ + vm_cluster.CloudVmCluster(), + ], + next_page_token='ghi', + ), + oracledatabase.ListCloudVmClustersResponse( + cloud_vm_clusters=[ + vm_cluster.CloudVmCluster(), + vm_cluster.CloudVmCluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListCloudVmClustersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_cloud_vm_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vm_cluster.CloudVmCluster) + for i in results) + + pages = list(client.list_cloud_vm_clusters(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.GetCloudVmClusterRequest, + dict, +]) +def test_get_cloud_vm_cluster_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = vm_cluster.CloudVmCluster( + name='name_value', + exadata_infrastructure='exadata_infrastructure_value', + display_name='display_name_value', + gcp_oracle_zone='gcp_oracle_zone_value', + cidr='cidr_value', + backup_subnet_cidr='backup_subnet_cidr_value', + network='network_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vm_cluster.CloudVmCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_cloud_vm_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vm_cluster.CloudVmCluster) + assert response.name == 'name_value' + assert response.exadata_infrastructure == 'exadata_infrastructure_value' + assert response.display_name == 'display_name_value' + assert response.gcp_oracle_zone == 'gcp_oracle_zone_value' + assert response.cidr == 'cidr_value' + assert response.backup_subnet_cidr == 'backup_subnet_cidr_value' + assert response.network == 'network_value' + +def test_get_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cloud_vm_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_cloud_vm_cluster] = mock_rpc + + request = {} + client.get_cloud_vm_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_cloud_vm_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.GetCloudVmClusterRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vm_cluster.CloudVmCluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vm_cluster.CloudVmCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_cloud_vm_cluster(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_cloud_vm_cluster_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_cloud_vm_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cloud_vm_cluster_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_get_cloud_vm_cluster") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_get_cloud_vm_cluster") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = oracledatabase.GetCloudVmClusterRequest.pb(oracledatabase.GetCloudVmClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vm_cluster.CloudVmCluster.to_json(vm_cluster.CloudVmCluster()) + + request = oracledatabase.GetCloudVmClusterRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vm_cluster.CloudVmCluster() + + client.get_cloud_vm_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cloud_vm_cluster_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GetCloudVmClusterRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cloud_vm_cluster(request) + + +def test_get_cloud_vm_cluster_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = vm_cluster.CloudVmCluster() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vm_cluster.CloudVmCluster.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_cloud_vm_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" % client.transport._host, args[1]) + + +def test_get_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_cloud_vm_cluster( + oracledatabase.GetCloudVmClusterRequest(), + name='name_value', + ) + + +def test_get_cloud_vm_cluster_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.CreateCloudVmClusterRequest, + dict, +]) +def test_create_cloud_vm_cluster_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["cloud_vm_cluster"] = {'name': 'name_value', 'exadata_infrastructure': 'exadata_infrastructure_value', 'display_name': 'display_name_value', 'gcp_oracle_zone': 'gcp_oracle_zone_value', 'properties': {'ocid': 'ocid_value', 'license_type': 1, 'gi_version': 'gi_version_value', 'time_zone': {'id': 'id_value', 'version': 'version_value'}, 'ssh_public_keys': ['ssh_public_keys_value1', 'ssh_public_keys_value2'], 'node_count': 1070, 'shape': 'shape_value', 'ocpu_count': 0.1087, 'memory_size_gb': 1499, 'db_node_storage_size_gb': 2401, 'storage_size_gb': 1591, 'data_storage_size_tb': 0.2109, 'disk_redundancy': 1, 'sparse_diskgroup_enabled': True, 'local_backup_enabled': True, 'hostname_prefix': 'hostname_prefix_value', 'diagnostics_data_collection_options': {'diagnostics_events_enabled': True, 'health_monitoring_enabled': True, 'incident_logs_enabled': True}, 'state': 1, 'scan_listener_port_tcp': 2356, 'scan_listener_port_tcp_ssl': 2789, 'domain': 'domain_value', 'scan_dns': 'scan_dns_value', 'hostname': 'hostname_value', 'cpu_core_count': 1496, 'system_version': 'system_version_value', 'scan_ip_ids': ['scan_ip_ids_value1', 'scan_ip_ids_value2'], 'scan_dns_record_id': 'scan_dns_record_id_value', 'oci_url': 'oci_url_value', 'db_server_ocids': ['db_server_ocids_value1', 
'db_server_ocids_value2'], 'compartment_id': 'compartment_id_value', 'dns_listener_ip': 'dns_listener_ip_value', 'cluster_name': 'cluster_name_value'}, 'labels': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'cidr': 'cidr_value', 'backup_subnet_cidr': 'backup_subnet_cidr_value', 'network': 'network_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = oracledatabase.CreateCloudVmClusterRequest.meta.fields["cloud_vm_cluster"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cloud_vm_cluster"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For 
fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cloud_vm_cluster"][field])): + del request_init["cloud_vm_cluster"][field][i][subfield] + else: + del request_init["cloud_vm_cluster"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_cloud_vm_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + +def test_create_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_cloud_vm_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_cloud_vm_cluster] = mock_rpc + + request = {} + client.create_cloud_vm_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cloud_vm_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.CreateCloudVmClusterRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["cloud_vm_cluster_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "cloudVmClusterId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "cloudVmClusterId" in jsonified_request + assert jsonified_request["cloudVmClusterId"] == request_init["cloud_vm_cluster_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["cloudVmClusterId"] = 'cloud_vm_cluster_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("cloud_vm_cluster_id", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "cloudVmClusterId" in jsonified_request + assert jsonified_request["cloudVmClusterId"] == 'cloud_vm_cluster_id_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_cloud_vm_cluster(request) + + expected_params = [ + ( + "cloudVmClusterId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_cloud_vm_cluster_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_cloud_vm_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(("cloudVmClusterId", "requestId", )) & set(("parent", "cloudVmClusterId", "cloudVmCluster", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cloud_vm_cluster_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_create_cloud_vm_cluster") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_create_cloud_vm_cluster") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
oracledatabase.CreateCloudVmClusterRequest.pb(oracledatabase.CreateCloudVmClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = oracledatabase.CreateCloudVmClusterRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cloud_vm_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cloud_vm_cluster_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.CreateCloudVmClusterRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cloud_vm_cluster(request) + + +def test_create_cloud_vm_cluster_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + cloud_vm_cluster=vm_cluster.CloudVmCluster(name='name_value'), + cloud_vm_cluster_id='cloud_vm_cluster_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_cloud_vm_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" % client.transport._host, args[1]) + + +def test_create_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_cloud_vm_cluster( + oracledatabase.CreateCloudVmClusterRequest(), + parent='parent_value', + cloud_vm_cluster=vm_cluster.CloudVmCluster(name='name_value'), + cloud_vm_cluster_id='cloud_vm_cluster_id_value', + ) + + +def test_create_cloud_vm_cluster_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.DeleteCloudVmClusterRequest, + dict, +]) +def test_delete_cloud_vm_cluster_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_cloud_vm_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + +def test_delete_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_cloud_vm_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_cloud_vm_cluster] = mock_rpc + + request = {} + client.delete_cloud_vm_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_cloud_vm_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.DeleteCloudVmClusterRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_cloud_vm_cluster(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_cloud_vm_cluster_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_cloud_vm_cluster._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force", "requestId", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cloud_vm_cluster_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + 
mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_delete_cloud_vm_cluster") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_vm_cluster") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.DeleteCloudVmClusterRequest.pb(oracledatabase.DeleteCloudVmClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = oracledatabase.DeleteCloudVmClusterRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cloud_vm_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cloud_vm_cluster_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.DeleteCloudVmClusterRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cloud_vm_cluster(request) + + +def test_delete_cloud_vm_cluster_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_cloud_vm_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" % client.transport._host, args[1]) + + +def test_delete_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_cloud_vm_cluster( + oracledatabase.DeleteCloudVmClusterRequest(), + name='name_value', + ) + + +def test_delete_cloud_vm_cluster_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListEntitlementsRequest, + dict, +]) +def test_list_entitlements_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListEntitlementsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_entitlements(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEntitlementsPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_entitlements_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_entitlements in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_entitlements] = mock_rpc + + request = {} + client.list_entitlements(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_entitlements(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_entitlements_rest_required_fields(request_type=oracledatabase.ListEntitlementsRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entitlements._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entitlements._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListEntitlementsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_entitlements(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_entitlements_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_entitlements._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entitlements_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_entitlements") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_entitlements") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListEntitlementsRequest.pb(oracledatabase.ListEntitlementsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListEntitlementsResponse.to_json(oracledatabase.ListEntitlementsResponse()) + + request = oracledatabase.ListEntitlementsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListEntitlementsResponse() + + client.list_entitlements(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_entitlements_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListEntitlementsRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_entitlements(request) + + +def test_list_entitlements_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListEntitlementsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_entitlements(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entitlements" % client.transport._host, args[1]) + + +def test_list_entitlements_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entitlements( + oracledatabase.ListEntitlementsRequest(), + parent='parent_value', + ) + + +def test_list_entitlements_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListEntitlementsResponse( + entitlements=[ + entitlement.Entitlement(), + entitlement.Entitlement(), + entitlement.Entitlement(), + ], + next_page_token='abc', + ), + oracledatabase.ListEntitlementsResponse( + entitlements=[], + next_page_token='def', + ), + oracledatabase.ListEntitlementsResponse( + entitlements=[ + entitlement.Entitlement(), + ], + next_page_token='ghi', + ), + oracledatabase.ListEntitlementsResponse( + entitlements=[ + entitlement.Entitlement(), + entitlement.Entitlement(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListEntitlementsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = 
response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_entitlements(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, entitlement.Entitlement) + for i in results) + + pages = list(client.list_entitlements(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListDbServersRequest, + dict, +]) +def test_list_db_servers_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbServersResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_db_servers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDbServersPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_db_servers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_db_servers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_db_servers] = mock_rpc + + request = {} + client.list_db_servers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_db_servers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_db_servers_rest_required_fields(request_type=oracledatabase.ListDbServersRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_servers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_servers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbServersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListDbServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_db_servers(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_db_servers_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_db_servers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_db_servers_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_db_servers") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_db_servers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListDbServersRequest.pb(oracledatabase.ListDbServersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListDbServersResponse.to_json(oracledatabase.ListDbServersResponse()) + + request = oracledatabase.ListDbServersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListDbServersResponse() + + client.list_db_servers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_db_servers_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListDbServersRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_db_servers(request) + + +def test_list_db_servers_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbServersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_db_servers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers" % client.transport._host, args[1]) + + +def test_list_db_servers_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_db_servers( + oracledatabase.ListDbServersRequest(), + parent='parent_value', + ) + + +def test_list_db_servers_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListDbServersResponse( + db_servers=[ + db_server.DbServer(), + db_server.DbServer(), + db_server.DbServer(), + ], + next_page_token='abc', + ), + oracledatabase.ListDbServersResponse( + db_servers=[], + next_page_token='def', + ), + oracledatabase.ListDbServersResponse( + db_servers=[ + db_server.DbServer(), + ], + next_page_token='ghi', + ), + oracledatabase.ListDbServersResponse( + db_servers=[ + db_server.DbServer(), + db_server.DbServer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListDbServersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + + pager = client.list_db_servers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, db_server.DbServer) + for i in results) + + pages = list(client.list_db_servers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListDbNodesRequest, + dict, +]) +def test_list_db_nodes_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbNodesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_db_nodes(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDbNodesPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_db_nodes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_db_nodes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_db_nodes] = mock_rpc + + request = {} + client.list_db_nodes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_db_nodes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_db_nodes_rest_required_fields(request_type=oracledatabase.ListDbNodesRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_nodes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_nodes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbNodesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListDbNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_db_nodes(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_db_nodes_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_db_nodes._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_db_nodes_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_db_nodes") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_db_nodes") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListDbNodesRequest.pb(oracledatabase.ListDbNodesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListDbNodesResponse.to_json(oracledatabase.ListDbNodesResponse()) + + request = oracledatabase.ListDbNodesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListDbNodesResponse() + + client.list_db_nodes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_db_nodes_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListDbNodesRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_db_nodes(request) + + +def test_list_db_nodes_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbNodesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbNodesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_db_nodes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes" % client.transport._host, args[1]) + + +def test_list_db_nodes_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_db_nodes( + oracledatabase.ListDbNodesRequest(), + parent='parent_value', + ) + + +def test_list_db_nodes_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListDbNodesResponse( + db_nodes=[ + db_node.DbNode(), + db_node.DbNode(), + db_node.DbNode(), + ], + next_page_token='abc', + ), + oracledatabase.ListDbNodesResponse( + db_nodes=[], + next_page_token='def', + ), + oracledatabase.ListDbNodesResponse( + db_nodes=[ + db_node.DbNode(), + ], + next_page_token='ghi', + ), + oracledatabase.ListDbNodesResponse( + db_nodes=[ + db_node.DbNode(), + db_node.DbNode(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListDbNodesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + 
sample_request = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + + pager = client.list_db_nodes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, db_node.DbNode) + for i in results) + + pages = list(client.list_db_nodes(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListGiVersionsRequest, + dict, +]) +def test_list_gi_versions_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListGiVersionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_gi_versions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGiVersionsPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_gi_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_gi_versions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_gi_versions] = mock_rpc + + request = {} + client.list_gi_versions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_gi_versions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_gi_versions_rest_required_fields(request_type=oracledatabase.ListGiVersionsRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_gi_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_gi_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListGiVersionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_gi_versions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_gi_versions_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_gi_versions._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_gi_versions_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ 
+ mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_gi_versions") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_gi_versions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListGiVersionsRequest.pb(oracledatabase.ListGiVersionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListGiVersionsResponse.to_json(oracledatabase.ListGiVersionsResponse()) + + request = oracledatabase.ListGiVersionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListGiVersionsResponse() + + client.list_gi_versions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_gi_versions_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListGiVersionsRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_gi_versions(request) + + +def test_list_gi_versions_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListGiVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_gi_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/giVersions" % client.transport._host, args[1]) + + +def test_list_gi_versions_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_gi_versions( + oracledatabase.ListGiVersionsRequest(), + parent='parent_value', + ) + + +def test_list_gi_versions_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListGiVersionsResponse( + gi_versions=[ + gi_version.GiVersion(), + gi_version.GiVersion(), + gi_version.GiVersion(), + ], + next_page_token='abc', + ), + oracledatabase.ListGiVersionsResponse( + gi_versions=[], + next_page_token='def', + ), + oracledatabase.ListGiVersionsResponse( + gi_versions=[ + gi_version.GiVersion(), + ], + next_page_token='ghi', + ), + oracledatabase.ListGiVersionsResponse( + gi_versions=[ + gi_version.GiVersion(), + gi_version.GiVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListGiVersionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + 
return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_gi_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, gi_version.GiVersion) + for i in results) + + pages = list(client.list_gi_versions(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListDbSystemShapesRequest, + dict, +]) +def test_list_db_system_shapes_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbSystemShapesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_db_system_shapes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDbSystemShapesPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_db_system_shapes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_db_system_shapes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_db_system_shapes] = mock_rpc + + request = {} + client.list_db_system_shapes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_db_system_shapes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_db_system_shapes_rest_required_fields(request_type=oracledatabase.ListDbSystemShapesRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_system_shapes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_system_shapes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbSystemShapesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_db_system_shapes(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_db_system_shapes_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_db_system_shapes._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_db_system_shapes_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_db_system_shapes") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_db_system_shapes") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListDbSystemShapesRequest.pb(oracledatabase.ListDbSystemShapesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListDbSystemShapesResponse.to_json(oracledatabase.ListDbSystemShapesResponse()) + + request = oracledatabase.ListDbSystemShapesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListDbSystemShapesResponse() + + client.list_db_system_shapes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_db_system_shapes_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListDbSystemShapesRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_db_system_shapes(request) + + +def test_list_db_system_shapes_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListDbSystemShapesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_db_system_shapes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dbSystemShapes" % client.transport._host, args[1]) + + +def test_list_db_system_shapes_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_db_system_shapes( + oracledatabase.ListDbSystemShapesRequest(), + parent='parent_value', + ) + + +def test_list_db_system_shapes_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[ + db_system_shape.DbSystemShape(), + db_system_shape.DbSystemShape(), + db_system_shape.DbSystemShape(), + ], + next_page_token='abc', + ), + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[], + next_page_token='def', + ), + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[ + db_system_shape.DbSystemShape(), + ], + next_page_token='ghi', + ), + oracledatabase.ListDbSystemShapesResponse( + db_system_shapes=[ + db_system_shape.DbSystemShape(), + db_system_shape.DbSystemShape(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListDbSystemShapesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_db_system_shapes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, db_system_shape.DbSystemShape) + for i in results) + + pages = list(client.list_db_system_shapes(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListAutonomousDatabasesRequest, + dict, +]) +def test_list_autonomous_databases_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabasesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_autonomous_databases(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutonomousDatabasesPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_autonomous_databases_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_autonomous_databases in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.list_autonomous_databases] = mock_rpc + + request = {} + client.list_autonomous_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_autonomous_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_databases_rest_required_fields(request_type=oracledatabase.ListAutonomousDatabasesRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListAutonomousDatabasesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_autonomous_databases(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_autonomous_databases_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_autonomous_databases._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_databases_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_databases") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_databases") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDatabasesRequest.pb(oracledatabase.ListAutonomousDatabasesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListAutonomousDatabasesResponse.to_json(oracledatabase.ListAutonomousDatabasesResponse()) + + request = oracledatabase.ListAutonomousDatabasesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDatabasesResponse() + + client.list_autonomous_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_databases_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDatabasesRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_databases(request) + + +def test_list_autonomous_databases_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_autonomous_databases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" % client.transport._host, args[1]) + + +def test_list_autonomous_databases_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_databases( + oracledatabase.ListAutonomousDatabasesRequest(), + parent='parent_value', + ) + + +def test_list_autonomous_databases_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[ + autonomous_database.AutonomousDatabase(), + autonomous_database.AutonomousDatabase(), + autonomous_database.AutonomousDatabase(), + ], + next_page_token='abc', + ), + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[], + next_page_token='def', + ), + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[ + autonomous_database.AutonomousDatabase(), + ], + next_page_token='ghi', + ), + oracledatabase.ListAutonomousDatabasesResponse( + autonomous_databases=[ + autonomous_database.AutonomousDatabase(), + autonomous_database.AutonomousDatabase(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListAutonomousDatabasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_autonomous_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, autonomous_database.AutonomousDatabase) + for i in results) + + pages = list(client.list_autonomous_databases(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.GetAutonomousDatabaseRequest, + dict, +]) +def test_get_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send 
a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = autonomous_database.AutonomousDatabase( + name='name_value', + database='database_value', + display_name='display_name_value', + entitlement_id='entitlement_id_value', + admin_password='admin_password_value', + network='network_value', + cidr='cidr_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autonomous_database.AutonomousDatabase.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_autonomous_database(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, autonomous_database.AutonomousDatabase) + assert response.name == 'name_value' + assert response.database == 'database_value' + assert response.display_name == 'display_name_value' + assert response.entitlement_id == 'entitlement_id_value' + assert response.admin_password == 'admin_password_value' + assert response.network == 'network_value' + assert response.cidr == 'cidr_value' + +def test_get_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_autonomous_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_autonomous_database] = mock_rpc + + request = {} + client.get_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_autonomous_database_rest_required_fields(request_type=oracledatabase.GetAutonomousDatabaseRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autonomous_database.AutonomousDatabase() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = autonomous_database.AutonomousDatabase.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_autonomous_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_get_autonomous_database") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_get_autonomous_database") as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GetAutonomousDatabaseRequest.pb(oracledatabase.GetAutonomousDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = autonomous_database.AutonomousDatabase.to_json(autonomous_database.AutonomousDatabase()) + + request = oracledatabase.GetAutonomousDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = autonomous_database.AutonomousDatabase() + + client.get_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GetAutonomousDatabaseRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_autonomous_database(request) + + +def test_get_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = autonomous_database.AutonomousDatabase() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = autonomous_database.AutonomousDatabase.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" % client.transport._host, args[1]) + + +def test_get_autonomous_database_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_autonomous_database( + oracledatabase.GetAutonomousDatabaseRequest(), + name='name_value', + ) + + +def test_get_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.CreateAutonomousDatabaseRequest, + dict, +]) +def test_create_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["autonomous_database"] = {'name': 'name_value', 'database': 'database_value', 'display_name': 'display_name_value', 'entitlement_id': 'entitlement_id_value', 'admin_password': 'admin_password_value', 'properties': {'ocid': 'ocid_value', 'compute_count': 0.1413, 'cpu_core_count': 1496, 'data_storage_size_tb': 2109, 'data_storage_size_gb': 2096, 'db_workload': 1, 'db_edition': 1, 'character_set': 'character_set_value', 'n_character_set': 'n_character_set_value', 'private_endpoint_ip': 'private_endpoint_ip_value', 'private_endpoint_label': 'private_endpoint_label_value', 'db_version': 'db_version_value', 'is_auto_scaling_enabled': True, 'is_storage_auto_scaling_enabled': True, 'license_type': 1, 'customer_contacts': [{'email': 'email_value'}], 'secret_id': 'secret_id_value', 'vault_id': 'vault_id_value', 'maintenance_schedule_type': 1, 'mtls_connection_required': True, 'backup_retention_period_days': 2975, 'actual_used_data_storage_size_tb': 0.3366, 'allocated_storage_size_tb': 0.2636, 'apex_details': {'apex_version': 'apex_version_value', 'ords_version': 'ords_version_value'}, 'are_primary_allowlisted_ips_used': True, 'lifecycle_details': 'lifecycle_details_value', 'state': 1, 'autonomous_container_database_id': 'autonomous_container_database_id_value', 
'available_upgrade_versions': ['available_upgrade_versions_value1', 'available_upgrade_versions_value2'], 'connection_strings': {'all_connection_strings': {'high': 'high_value', 'low': 'low_value', 'medium': 'medium_value'}, 'dedicated': 'dedicated_value', 'high': 'high_value', 'low': 'low_value', 'medium': 'medium_value', 'profiles': [{'consumer_group': 1, 'display_name': 'display_name_value', 'host_format': 1, 'is_regional': True, 'protocol': 1, 'session_mode': 1, 'syntax_format': 1, 'tls_authentication': 1, 'value': 'value_value'}]}, 'connection_urls': {'apex_uri': 'apex_uri_value', 'database_transforms_uri': 'database_transforms_uri_value', 'graph_studio_uri': 'graph_studio_uri_value', 'machine_learning_notebook_uri': 'machine_learning_notebook_uri_value', 'machine_learning_user_management_uri': 'machine_learning_user_management_uri_value', 'mongo_db_uri': 'mongo_db_uri_value', 'ords_uri': 'ords_uri_value', 'sql_dev_web_uri': 'sql_dev_web_uri_value'}, 'failed_data_recovery_duration': {'seconds': 751, 'nanos': 543}, 'memory_table_gbs': 1691, 'is_local_data_guard_enabled': True, 'local_adg_auto_failover_max_data_loss_limit': 4513, 'local_standby_db': {'lag_time_duration': {}, 'lifecycle_details': 'lifecycle_details_value', 'state': 1, 'data_guard_role_changed_time': {'seconds': 751, 'nanos': 543}, 'disaster_recovery_role_changed_time': {}}, 'memory_per_oracle_compute_unit_gbs': 3626, 'local_disaster_recovery_type': 1, 'data_safe_state': 1, 'database_management_state': 1, 'open_mode': 1, 'operations_insights_state': 1, 'peer_db_ids': ['peer_db_ids_value1', 'peer_db_ids_value2'], 'permission_level': 1, 'private_endpoint': 'private_endpoint_value', 'refreshable_mode': 1, 'refreshable_state': 1, 'role': 1, 'scheduled_operation_details': [{'day_of_week': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'stop_time': {}}], 'sql_web_developer_url': 'sql_web_developer_url_value', 'supported_clone_regions': ['supported_clone_regions_value1', 
'supported_clone_regions_value2'], 'used_data_storage_size_tbs': 2752, 'oci_url': 'oci_url_value', 'total_auto_backup_storage_size_gbs': 0.36100000000000004, 'next_long_term_backup_time': {}, 'maintenance_begin_time': {}, 'maintenance_end_time': {}}, 'labels': {}, 'network': 'network_value', 'cidr': 'cidr_value', 'create_time': {}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = oracledatabase.CreateAutonomousDatabaseRequest.meta.fields["autonomous_database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["autonomous_database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = 
True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["autonomous_database"][field])): + del request_init["autonomous_database"][field][i][subfield] + else: + del request_init["autonomous_database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_autonomous_database(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + +def test_create_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_autonomous_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_autonomous_database] = mock_rpc + + request = {} + client.create_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_autonomous_database_rest_required_fields(request_type=oracledatabase.CreateAutonomousDatabaseRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["autonomous_database_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "autonomousDatabaseId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "autonomousDatabaseId" in jsonified_request + assert jsonified_request["autonomousDatabaseId"] == request_init["autonomous_database_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["autonomousDatabaseId"] = 'autonomous_database_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_autonomous_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("autonomous_database_id", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "autonomousDatabaseId" in jsonified_request + assert jsonified_request["autonomousDatabaseId"] == 'autonomous_database_id_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_autonomous_database(request) + + expected_params = [ + ( + "autonomousDatabaseId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("autonomousDatabaseId", "requestId", )) & set(("parent", "autonomousDatabaseId", "autonomousDatabase", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_create_autonomous_database") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_create_autonomous_database") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = oracledatabase.CreateAutonomousDatabaseRequest.pb(oracledatabase.CreateAutonomousDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = oracledatabase.CreateAutonomousDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.CreateAutonomousDatabaseRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_autonomous_database(request) + + +def test_create_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + autonomous_database=gco_autonomous_database.AutonomousDatabase(name='name_value'), + autonomous_database_id='autonomous_database_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" % client.transport._host, args[1]) + + +def test_create_autonomous_database_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_autonomous_database( + oracledatabase.CreateAutonomousDatabaseRequest(), + parent='parent_value', + autonomous_database=gco_autonomous_database.AutonomousDatabase(name='name_value'), + autonomous_database_id='autonomous_database_id_value', + ) + + +def test_create_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.DeleteAutonomousDatabaseRequest, + dict, +]) +def test_delete_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_autonomous_database(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + +def test_delete_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_autonomous_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_autonomous_database] = mock_rpc + + request = {} + client.delete_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_autonomous_database_rest_required_fields(request_type=oracledatabase.DeleteAutonomousDatabaseRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_autonomous_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_autonomous_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + 
mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_delete_autonomous_database") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_delete_autonomous_database") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.DeleteAutonomousDatabaseRequest.pb(oracledatabase.DeleteAutonomousDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = oracledatabase.DeleteAutonomousDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.DeleteAutonomousDatabaseRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_autonomous_database(request) + + +def test_delete_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" % client.transport._host, args[1]) + + +def test_delete_autonomous_database_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_autonomous_database( + oracledatabase.DeleteAutonomousDatabaseRequest(), + name='name_value', + ) + + +def test_delete_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.RestoreAutonomousDatabaseRequest, + dict, +]) +def test_restore_autonomous_database_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.restore_autonomous_database(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + +def test_restore_autonomous_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_autonomous_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.restore_autonomous_database] = mock_rpc + + request = {} + client.restore_autonomous_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_autonomous_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_restore_autonomous_database_rest_required_fields(request_type=oracledatabase.RestoreAutonomousDatabaseRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_autonomous_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.restore_autonomous_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_restore_autonomous_database_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.restore_autonomous_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "restoreTime", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_autonomous_database_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, 
"_set_result_from_operation"), \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_restore_autonomous_database") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_restore_autonomous_database") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.RestoreAutonomousDatabaseRequest.pb(oracledatabase.RestoreAutonomousDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = oracledatabase.RestoreAutonomousDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.RestoreAutonomousDatabaseRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_autonomous_database(request) + + +def test_restore_autonomous_database_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + restore_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.restore_autonomous_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore" % client.transport._host, args[1]) + + +def test_restore_autonomous_database_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_autonomous_database( + oracledatabase.RestoreAutonomousDatabaseRequest(), + name='name_value', + restore_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_restore_autonomous_database_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.GenerateAutonomousDatabaseWalletRequest, + dict, +]) +def test_generate_autonomous_database_wallet_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse( + archive_content=b'archive_content_blob', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.generate_autonomous_database_wallet(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, oracledatabase.GenerateAutonomousDatabaseWalletResponse) + assert response.archive_content == b'archive_content_blob' + +def test_generate_autonomous_database_wallet_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_autonomous_database_wallet in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.generate_autonomous_database_wallet] = mock_rpc + + request = {} + client.generate_autonomous_database_wallet(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_autonomous_database_wallet(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_generate_autonomous_database_wallet_rest_required_fields(request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["name"] = "" + request_init["password"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + jsonified_request["password"] = 'password_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "password" in jsonified_request + assert jsonified_request["password"] == 'password_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.generate_autonomous_database_wallet(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_generate_autonomous_database_wallet_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.generate_autonomous_database_wallet._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "password", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_autonomous_database_wallet_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_generate_autonomous_database_wallet") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_generate_autonomous_database_wallet") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb(oracledatabase.GenerateAutonomousDatabaseWalletRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.GenerateAutonomousDatabaseWalletResponse.to_json(oracledatabase.GenerateAutonomousDatabaseWalletResponse()) + + request = oracledatabase.GenerateAutonomousDatabaseWalletRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + + client.generate_autonomous_database_wallet(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_autonomous_database_wallet_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_autonomous_database_wallet(request) + + +def test_generate_autonomous_database_wallet_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + type_=autonomous_database.GenerateType.ALL, + is_regional=True, + password='password_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.generate_autonomous_database_wallet(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet" % client.transport._host, args[1]) + + +def test_generate_autonomous_database_wallet_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_autonomous_database_wallet( + oracledatabase.GenerateAutonomousDatabaseWalletRequest(), + name='name_value', + type_=autonomous_database.GenerateType.ALL, + is_regional=True, + password='password_value', + ) + + +def test_generate_autonomous_database_wallet_rest_error(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListAutonomousDbVersionsRequest, + dict, +]) +def test_list_autonomous_db_versions_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = oracledatabase.ListAutonomousDbVersionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_autonomous_db_versions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutonomousDbVersionsPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_autonomous_db_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_autonomous_db_versions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_autonomous_db_versions] = mock_rpc + + request = {} + client.list_autonomous_db_versions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_autonomous_db_versions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_db_versions_rest_required_fields(request_type=oracledatabase.ListAutonomousDbVersionsRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDbVersionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_autonomous_db_versions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_autonomous_db_versions_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_autonomous_db_versions._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_db_versions_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_db_versions") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_db_versions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDbVersionsRequest.pb(oracledatabase.ListAutonomousDbVersionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListAutonomousDbVersionsResponse.to_json(oracledatabase.ListAutonomousDbVersionsResponse()) + + request = oracledatabase.ListAutonomousDbVersionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDbVersionsResponse() + + client.list_autonomous_db_versions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_db_versions_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDbVersionsRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_db_versions(request) + + +def test_list_autonomous_db_versions_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDbVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_autonomous_db_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDbVersions" % client.transport._host, args[1]) + + +def test_list_autonomous_db_versions_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_db_versions( + oracledatabase.ListAutonomousDbVersionsRequest(), + parent='parent_value', + ) + + +def test_list_autonomous_db_versions_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[ + autonomous_db_version.AutonomousDbVersion(), + autonomous_db_version.AutonomousDbVersion(), + autonomous_db_version.AutonomousDbVersion(), + ], + next_page_token='abc', + ), + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[], + next_page_token='def', + ), + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[ + autonomous_db_version.AutonomousDbVersion(), + ], + next_page_token='ghi', + ), + oracledatabase.ListAutonomousDbVersionsResponse( + autonomous_db_versions=[ + autonomous_db_version.AutonomousDbVersion(), + autonomous_db_version.AutonomousDbVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListAutonomousDbVersionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_autonomous_db_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, autonomous_db_version.AutonomousDbVersion) + for i in results) + + pages = list(client.list_autonomous_db_versions(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + dict, +]) +def test_list_autonomous_database_character_sets_rest(request_type): + client = OracleDatabaseClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_autonomous_database_character_sets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutonomousDatabaseCharacterSetsPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_autonomous_database_character_sets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_autonomous_database_character_sets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_autonomous_database_character_sets] = mock_rpc + + request = {} + client.list_autonomous_database_character_sets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_autonomous_database_character_sets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_autonomous_database_character_sets_rest_required_fields(request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + transport_class = transports.OracleDatabaseRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_character_sets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_character_sets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_autonomous_database_character_sets(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_autonomous_database_character_sets_rest_unset_required_fields(): + transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_autonomous_database_character_sets._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_autonomous_database_character_sets_rest_interceptors(null_interceptor): + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), + ) + client = OracleDatabaseClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_database_character_sets") as post, \ + mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_character_sets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb(oracledatabase.ListAutonomousDatabaseCharacterSetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(oracledatabase.ListAutonomousDatabaseCharacterSetsResponse()) + + request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + + client.list_autonomous_database_character_sets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_autonomous_database_character_sets_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autonomous_database_character_sets(request) + + +def test_list_autonomous_database_character_sets_rest_flattened(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_autonomous_database_character_sets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets" % client.transport._host, args[1]) + + +def test_list_autonomous_database_character_sets_rest_flattened_error(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autonomous_database_character_sets( + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(), + parent='parent_value', + ) + + +def test_list_autonomous_database_character_sets_rest_pager(transport: str = 'rest'): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + ], + next_page_token='abc', + ), + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[], + next_page_token='def', + ), + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + ], + next_page_token='ghi', + ), + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( + autonomous_database_character_sets=[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + autonomous_database_character_set.AutonomousDatabaseCharacterSet(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_autonomous_database_character_sets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, autonomous_database_character_set.AutonomousDatabaseCharacterSet) + for i in results) + + pages = list(client.list_autonomous_database_character_sets(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + 
assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + oracledatabase.ListAutonomousDatabaseBackupsRequest, + dict, +]) +def test_list_autonomous_database_backups_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_autonomous_database_backups(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAutonomousDatabaseBackupsPager) + assert response.next_page_token == 'next_page_token_value' + +def test_list_autonomous_database_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_autonomous_database_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_autonomous_database_backups] = mock_rpc + + request = {} + client.list_autonomous_database_backups(request) + + # Establish that the underlying gRPC stub method was called. 
def test_list_autonomous_database_backups_rest_required_fields(request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest):
    """Verify required-field handling for the REST list_autonomous_database_backups call.

    Checks that default-valued fields are dropped, that unset required fields
    are re-added, and that the transcoded HTTP request carries the expected
    query parameters.
    """
    transport_class = transports.OracleDatabaseRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_backups._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_backups._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "page_size", "page_token", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_autonomous_database_backups(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_autonomous_database_backups_rest_unset_required_fields():
    """Verify the set of unset required fields computed for an empty request."""
    # FIX: was `credentials=ga_credentials.AnonymousCredentials` (the class,
    # not an instance) — inconsistent with every other transport construction
    # in this file. The transport should receive a credentials *instance*.
    transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.list_autonomous_database_backups._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_autonomous_database_backups_rest_interceptors(null_interceptor):
    """Verify pre/post REST interceptors fire exactly once around the call."""
    transport = transports.OracleDatabaseRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(),
        )
    client = OracleDatabaseClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_database_backups") as post, \
         mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_backups") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb(oracledatabase.ListAutonomousDatabaseBackupsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(oracledatabase.ListAutonomousDatabaseBackupsResponse())

        request = oracledatabase.ListAutonomousDatabaseBackupsRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse()

        client.list_autonomous_database_backups(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_autonomous_database_backups_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest):
    """Verify an HTTP 400 surfaces as core_exceptions.BadRequest."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_autonomous_database_backups(request)
def test_list_autonomous_database_backups_rest_flattened():
    """Verify the flattened-argument form expands to the expected HTTP URI."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {'parent': 'projects/sample1/locations/sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            parent='parent_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list_autonomous_database_backups(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups" % client.transport._host, args[1])


def test_list_autonomous_database_backups_rest_flattened_error(transport: str = 'rest'):
    """Verify passing a request object alongside flattened fields raises ValueError."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_autonomous_database_backups(
            oracledatabase.ListAutonomousDatabaseBackupsRequest(),
            parent='parent_value',
        )


def test_list_autonomous_database_backups_rest_pager(transport: str = 'rest'):
    """Verify the pager iterates all items across pages and exposes page tokens."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            oracledatabase.ListAutonomousDatabaseBackupsResponse(
                autonomous_database_backups=[
                    autonomous_db_backup.AutonomousDatabaseBackup(),
                    autonomous_db_backup.AutonomousDatabaseBackup(),
                    autonomous_db_backup.AutonomousDatabaseBackup(),
                ],
                next_page_token='abc',
            ),
            oracledatabase.ListAutonomousDatabaseBackupsResponse(
                autonomous_database_backups=[],
                next_page_token='def',
            ),
            oracledatabase.ListAutonomousDatabaseBackupsResponse(
                autonomous_database_backups=[
                    autonomous_db_backup.AutonomousDatabaseBackup(),
                ],
                next_page_token='ghi',
            ),
            oracledatabase.ListAutonomousDatabaseBackupsResponse(
                autonomous_database_backups=[
                    autonomous_db_backup.AutonomousDatabaseBackup(),
                    autonomous_db_backup.AutonomousDatabaseBackup(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'parent': 'projects/sample1/locations/sample2'}

        pager = client.list_autonomous_database_backups(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, autonomous_db_backup.AutonomousDatabaseBackup)
                   for i in results)

        pages = list(client.list_autonomous_database_backups(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token


def test_credentials_transport_error():
    """Verify mutually exclusive client options (credentials/transport/api_key/scopes) raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.OracleDatabaseRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = OracleDatabaseClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.OracleDatabaseRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = OracleDatabaseClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.OracleDatabaseRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = OracleDatabaseClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = OracleDatabaseClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.OracleDatabaseRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = OracleDatabaseClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )
def test_transport_instance():
    """Verify a client adopts a custom transport instance as-is."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.OracleDatabaseRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = OracleDatabaseClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.OracleDatabaseRestTransport,
])
def test_transport_adc(transport_class):
    """Verify transports fall back to Application Default Credentials."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    """Verify transport.kind matches the name used to select it."""
    transport = OracleDatabaseClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_oracle_database_base_transport_error():
    """Verify credentials and credentials_file are mutually exclusive."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.OracleDatabaseTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_oracle_database_base_transport():
    """Verify every abstract method/property on the base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch('google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.OracleDatabaseTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'list_cloud_exadata_infrastructures',
        'get_cloud_exadata_infrastructure',
        'create_cloud_exadata_infrastructure',
        'delete_cloud_exadata_infrastructure',
        'list_cloud_vm_clusters',
        'get_cloud_vm_cluster',
        'create_cloud_vm_cluster',
        'delete_cloud_vm_cluster',
        'list_entitlements',
        'list_db_servers',
        'list_db_nodes',
        'list_gi_versions',
        'list_db_system_shapes',
        'list_autonomous_databases',
        'get_autonomous_database',
        'create_autonomous_database',
        'delete_autonomous_database',
        'restore_autonomous_database',
        'generate_autonomous_database_wallet',
        'list_autonomous_db_versions',
        'list_autonomous_database_character_sets',
        'list_autonomous_database_backups',
        'get_location',
        'list_locations',
        'get_operation',
        'cancel_operation',
        'delete_operation',
        'list_operations',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_oracle_database_base_transport_with_credentials_file():
    """Verify a credentials file is loaded with the expected scopes and quota project."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.OracleDatabaseTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id="octopus",
        )


def test_oracle_database_base_transport_with_adc():
    """Verify the base transport falls back to ADC when no credentials are given."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.OracleDatabaseTransport()
        adc.assert_called_once()


def test_oracle_database_auth_adc():
    """Verify the client requests ADC with the cloud-platform default scope."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        OracleDatabaseClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id=None,
        )


def test_oracle_database_http_transport_client_cert_source_for_mtls():
    """Verify the REST transport wires a client cert source into the mTLS channel."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.OracleDatabaseRestTransport (
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
def test_oracle_database_rest_lro_client():
    """Verify the REST transport exposes a cached api-core operations client."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(
        transport.operations_client,
        operations_v1.AbstractOperationsClient,
    )

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_oracle_database_host_no_port(transport_name):
    """Verify the default service endpoint (no explicit port) per transport."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='oracledatabase.googleapis.com'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'oracledatabase.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://oracledatabase.googleapis.com'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_oracle_database_host_with_port(transport_name):
    """Verify an explicit port in the endpoint is preserved per transport."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='oracledatabase.googleapis.com:8000'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'oracledatabase.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://oracledatabase.googleapis.com:8000'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_oracle_database_client_transport_session_collision(transport_name):
    """Verify two clients never share an HTTP session for any RPC method."""
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = OracleDatabaseClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = OracleDatabaseClient(
        credentials=creds2,
        transport=transport_name,
    )
    session1 = client1.transport.list_cloud_exadata_infrastructures._session
    session2 = client2.transport.list_cloud_exadata_infrastructures._session
    assert session1 != session2
    session1 = client1.transport.get_cloud_exadata_infrastructure._session
    session2 = client2.transport.get_cloud_exadata_infrastructure._session
    assert session1 != session2
    session1 = client1.transport.create_cloud_exadata_infrastructure._session
    session2 = client2.transport.create_cloud_exadata_infrastructure._session
    assert session1 != session2
    session1 = client1.transport.delete_cloud_exadata_infrastructure._session
    session2 = client2.transport.delete_cloud_exadata_infrastructure._session
    assert session1 != session2
    session1 = client1.transport.list_cloud_vm_clusters._session
    session2 = client2.transport.list_cloud_vm_clusters._session
    assert session1 != session2
    session1 = client1.transport.get_cloud_vm_cluster._session
    session2 = client2.transport.get_cloud_vm_cluster._session
    assert session1 != session2
    session1 = client1.transport.create_cloud_vm_cluster._session
    session2 = client2.transport.create_cloud_vm_cluster._session
    assert session1 != session2
    session1 = client1.transport.delete_cloud_vm_cluster._session
    session2 = client2.transport.delete_cloud_vm_cluster._session
    assert session1 != session2
    session1 = client1.transport.list_entitlements._session
    session2 = client2.transport.list_entitlements._session
    assert session1 != session2
    session1 = client1.transport.list_db_servers._session
    session2 = client2.transport.list_db_servers._session
    assert session1 != session2
    session1 = client1.transport.list_db_nodes._session
    session2 = client2.transport.list_db_nodes._session
    assert session1 != session2
    session1 = client1.transport.list_gi_versions._session
    session2 = client2.transport.list_gi_versions._session
    assert session1 != session2
    session1 = client1.transport.list_db_system_shapes._session
    session2 = client2.transport.list_db_system_shapes._session
    assert session1 != session2
    session1 = client1.transport.list_autonomous_databases._session
    session2 = client2.transport.list_autonomous_databases._session
    assert session1 != session2
    session1 = client1.transport.get_autonomous_database._session
    session2 = client2.transport.get_autonomous_database._session
    assert session1 != session2
    session1 = client1.transport.create_autonomous_database._session
    session2 = client2.transport.create_autonomous_database._session
    assert session1 != session2
    session1 = client1.transport.delete_autonomous_database._session
    session2 = client2.transport.delete_autonomous_database._session
    assert session1 != session2
    session1 = client1.transport.restore_autonomous_database._session
    session2 = client2.transport.restore_autonomous_database._session
    assert session1 != session2
    session1 = client1.transport.generate_autonomous_database_wallet._session
    session2 = client2.transport.generate_autonomous_database_wallet._session
    assert session1 != session2
    session1 = client1.transport.list_autonomous_db_versions._session
    session2 = client2.transport.list_autonomous_db_versions._session
    assert session1 != session2
    session1 = client1.transport.list_autonomous_database_character_sets._session
    session2 = client2.transport.list_autonomous_database_character_sets._session
    assert session1 != session2
    session1 = client1.transport.list_autonomous_database_backups._session
    session2 = client2.transport.list_autonomous_database_backups._session
    assert session1 != session2

def test_autonomous_database_path():
    """Verify autonomous_database_path builds the expected resource name."""
    project = "squid"
    location = "clam"
    autonomous_database = "whelk"
    expected = "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format(project=project, location=location, autonomous_database=autonomous_database, )
    actual = OracleDatabaseClient.autonomous_database_path(project, location, autonomous_database)
    assert expected == actual


def test_parse_autonomous_database_path():
    """Verify parse_autonomous_database_path round-trips a built path."""
    expected = {
        "project": "octopus",
        "location": "oyster",
        "autonomous_database": "nudibranch",
    }
    path = OracleDatabaseClient.autonomous_database_path(**expected)

    # Check that the path construction is reversible.
    actual = OracleDatabaseClient.parse_autonomous_database_path(path)
    assert expected == actual
def test_autonomous_database_backup_path():
    """Verify autonomous_database_backup_path builds the expected resource name."""
    proj, loc, backup = "cuttlefish", "mussel", "winkle"
    want = f"projects/{proj}/locations/{loc}/autonomousDatabaseBackups/{backup}"
    got = OracleDatabaseClient.autonomous_database_backup_path(proj, loc, backup)
    assert want == got


def test_parse_autonomous_database_backup_path():
    """Verify the backup path round-trips through parse_."""
    components = {
        "project": "nautilus",
        "location": "scallop",
        "autonomous_database_backup": "abalone",
    }
    path = OracleDatabaseClient.autonomous_database_backup_path(**components)
    # Parsing must recover exactly the components used to build the path.
    assert OracleDatabaseClient.parse_autonomous_database_backup_path(path) == components

def test_autonomous_database_character_set_path():
    """Verify autonomous_database_character_set_path builds the expected resource name."""
    proj, loc, charset = "squid", "clam", "whelk"
    want = f"projects/{proj}/locations/{loc}/autonomousDatabaseCharacterSets/{charset}"
    got = OracleDatabaseClient.autonomous_database_character_set_path(proj, loc, charset)
    assert want == got


def test_parse_autonomous_database_character_set_path():
    """Verify the character-set path round-trips through parse_."""
    components = {
        "project": "octopus",
        "location": "oyster",
        "autonomous_database_character_set": "nudibranch",
    }
    path = OracleDatabaseClient.autonomous_database_character_set_path(**components)
    assert OracleDatabaseClient.parse_autonomous_database_character_set_path(path) == components

def test_autonomous_db_version_path():
    """Verify autonomous_db_version_path builds the expected resource name."""
    proj, loc, version = "cuttlefish", "mussel", "winkle"
    want = f"projects/{proj}/locations/{loc}/autonomousDbVersions/{version}"
    got = OracleDatabaseClient.autonomous_db_version_path(proj, loc, version)
    assert want == got


def test_parse_autonomous_db_version_path():
    """Verify the db-version path round-trips through parse_."""
    components = {
        "project": "nautilus",
        "location": "scallop",
        "autonomous_db_version": "abalone",
    }
    path = OracleDatabaseClient.autonomous_db_version_path(**components)
    assert OracleDatabaseClient.parse_autonomous_db_version_path(path) == components

def test_cloud_exadata_infrastructure_path():
    """Verify cloud_exadata_infrastructure_path builds the expected resource name."""
    proj, loc, infra = "squid", "clam", "whelk"
    want = f"projects/{proj}/locations/{loc}/cloudExadataInfrastructures/{infra}"
    got = OracleDatabaseClient.cloud_exadata_infrastructure_path(proj, loc, infra)
    assert want == got


def test_parse_cloud_exadata_infrastructure_path():
    """Verify the Exadata-infrastructure path round-trips through parse_."""
    components = {
        "project": "octopus",
        "location": "oyster",
        "cloud_exadata_infrastructure": "nudibranch",
    }
    path = OracleDatabaseClient.cloud_exadata_infrastructure_path(**components)
    assert OracleDatabaseClient.parse_cloud_exadata_infrastructure_path(path) == components

def test_cloud_vm_cluster_path():
    """Verify cloud_vm_cluster_path builds the expected resource name."""
    proj, loc, cluster = "cuttlefish", "mussel", "winkle"
    want = f"projects/{proj}/locations/{loc}/cloudVmClusters/{cluster}"
    got = OracleDatabaseClient.cloud_vm_cluster_path(proj, loc, cluster)
    assert want == got


def test_parse_cloud_vm_cluster_path():
    """Verify the VM-cluster path round-trips through parse_."""
    components = {
        "project": "nautilus",
        "location": "scallop",
        "cloud_vm_cluster": "abalone",
    }
    path = OracleDatabaseClient.cloud_vm_cluster_path(**components)
    assert OracleDatabaseClient.parse_cloud_vm_cluster_path(path) == components

def test_db_node_path():
    """Verify db_node_path builds the expected resource name."""
    proj, loc, cluster, node = "squid", "clam", "whelk", "octopus"
    want = f"projects/{proj}/locations/{loc}/cloudVmClusters/{cluster}/dbNodes/{node}"
    got = OracleDatabaseClient.db_node_path(proj, loc, cluster, node)
    assert want == got


def test_parse_db_node_path():
    """Verify the db-node path round-trips through parse_."""
    components = {
        "project": "oyster",
        "location": "nudibranch",
        "cloud_vm_cluster": "cuttlefish",
        "db_node": "mussel",
    }
    path = OracleDatabaseClient.db_node_path(**components)
    assert OracleDatabaseClient.parse_db_node_path(path) == components

def test_db_server_path():
    """Verify db_server_path builds the expected resource name."""
    proj, loc, infra, server = "winkle", "nautilus", "scallop", "abalone"
    want = f"projects/{proj}/locations/{loc}/cloudExadataInfrastructures/{infra}/dbServers/{server}"
    got = OracleDatabaseClient.db_server_path(proj, loc, infra, server)
    assert want == got


def test_parse_db_server_path():
    """Verify the db-server path round-trips through parse_."""
    components = {
        "project": "squid",
        "location": "clam",
        "cloud_exadata_infrastructure": "whelk",
        "db_server": "octopus",
    }
    path = OracleDatabaseClient.db_server_path(**components)
    assert OracleDatabaseClient.parse_db_server_path(path) == components

def test_db_system_shape_path():
    """Verify db_system_shape_path builds the expected resource name."""
    proj, loc, shape = "oyster", "nudibranch", "cuttlefish"
    want = f"projects/{proj}/locations/{loc}/dbSystemShapes/{shape}"
    got = OracleDatabaseClient.db_system_shape_path(proj, loc, shape)
    assert want == got


def test_parse_db_system_shape_path():
    """Verify the system-shape path round-trips through parse_."""
    components = {
        "project": "mussel",
        "location": "winkle",
        "db_system_shape": "nautilus",
    }
    path = OracleDatabaseClient.db_system_shape_path(**components)
    assert OracleDatabaseClient.parse_db_system_shape_path(path) == components

def test_entitlement_path():
    """Verify entitlement_path builds the expected resource name."""
    proj, loc, entitlement = "scallop", "abalone", "squid"
    want = f"projects/{proj}/locations/{loc}/entitlements/{entitlement}"
    got = OracleDatabaseClient.entitlement_path(proj, loc, entitlement)
    assert want == got


def test_parse_entitlement_path():
    """Verify the entitlement path round-trips through parse_."""
    components = {
        "project": "clam",
        "location": "whelk",
        "entitlement": "octopus",
    }
    path = OracleDatabaseClient.entitlement_path(**components)
    assert OracleDatabaseClient.parse_entitlement_path(path) == components

def test_gi_version_path():
    """Verify gi_version_path builds the expected resource name."""
    proj, loc, gi = "oyster", "nudibranch", "cuttlefish"
    want = f"projects/{proj}/locations/{loc}/giVersions/{gi}"
    got = OracleDatabaseClient.gi_version_path(proj, loc, gi)
    assert want == got


def test_parse_gi_version_path():
    """Verify the GI-version path round-trips through parse_."""
    components = {
        "project": "mussel",
        "location": "winkle",
        "gi_version": "nautilus",
    }
    path = OracleDatabaseClient.gi_version_path(**components)
    assert OracleDatabaseClient.parse_gi_version_path(path) == components

def test_network_path():
    """Verify network_path builds the expected global network resource name."""
    proj, net = "scallop", "abalone"
    want = f"projects/{proj}/global/networks/{net}"
    got = OracleDatabaseClient.network_path(proj, net)
    assert want == got


def test_parse_network_path():
    """Verify the network path round-trips through parse_."""
    components = {
        "project": "squid",
        "network": "clam",
    }
    path = OracleDatabaseClient.network_path(**components)
    assert OracleDatabaseClient.parse_network_path(path) == components

def test_common_billing_account_path():
    """Verify common_billing_account_path builds the expected resource name."""
    account = "whelk"
    want = f"billingAccounts/{account}"
    got = OracleDatabaseClient.common_billing_account_path(account)
    assert want == got


def test_parse_common_billing_account_path():
    """Verify the billing-account path round-trips through parse_."""
    components = {
        "billing_account": "octopus",
    }
    path = OracleDatabaseClient.common_billing_account_path(**components)
    assert OracleDatabaseClient.parse_common_billing_account_path(path) == components

def test_common_folder_path():
    """Verify common_folder_path builds the expected resource name."""
    folder_id = "oyster"
    want = f"folders/{folder_id}"
    got = OracleDatabaseClient.common_folder_path(folder_id)
    assert want == got


def test_parse_common_folder_path():
    """Verify the folder path round-trips through parse_."""
    components = {
        "folder": "nudibranch",
    }
    path = OracleDatabaseClient.common_folder_path(**components)
    assert OracleDatabaseClient.parse_common_folder_path(path) == components

def test_common_organization_path():
    """Verify common_organization_path builds the expected resource name."""
    org = "cuttlefish"
    want = f"organizations/{org}"
    got = OracleDatabaseClient.common_organization_path(org)
    assert want == got


def test_parse_common_organization_path():
    """Verify the organization path round-trips through parse_."""
    components = {
        "organization": "mussel",
    }
    path = OracleDatabaseClient.common_organization_path(**components)
    assert OracleDatabaseClient.parse_common_organization_path(path) == components

def test_common_project_path():
    """Verify common_project_path builds the expected resource name."""
    proj = "winkle"
    want = f"projects/{proj}"
    got = OracleDatabaseClient.common_project_path(proj)
    assert want == got


def test_parse_common_project_path():
    """Verify the project path round-trips through parse_."""
    components = {
        "project": "nautilus",
    }
    path = OracleDatabaseClient.common_project_path(**components)
    assert OracleDatabaseClient.parse_common_project_path(path) == components

def test_common_location_path():
    """Verify common_location_path builds the expected resource name."""
    proj, loc = "scallop", "abalone"
    want = f"projects/{proj}/locations/{loc}"
    got = OracleDatabaseClient.common_location_path(proj, loc)
    assert want == got


def test_parse_common_location_path():
    """Verify the location path round-trips through parse_."""
    components = {
        "project": "squid",
        "location": "clam",
    }
    path = OracleDatabaseClient.common_location_path(**components)
    assert OracleDatabaseClient.parse_common_location_path(path) == components
def test_client_with_default_client_info():
    """Verify client_info is forwarded to the transport's _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.OracleDatabaseTransport, '_prep_wrapped_messages') as prep:
        client = OracleDatabaseClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.OracleDatabaseTransport, '_prep_wrapped_messages') as prep:
        transport_class = OracleDatabaseClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest):
    """Verify get_location surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_location(request)

@pytest.mark.parametrize("request_type", [
    locations_pb2.GetLocationRequest,
    dict,
])
def test_get_location_rest(request_type):
    """Verify get_location returns a locations_pb2.Location on success."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request_init = {'name': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = locations_pb2.Location()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        response = client.get_location(request)

        # Establish that the response is the type that we expect.
        assert isinstance(response, locations_pb2.Location)

def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest):
    """Verify list_locations surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_locations(request)

@pytest.mark.parametrize("request_type", [
    locations_pb2.ListLocationsRequest,
    dict,
])
def test_list_locations_rest(request_type):
    """Verify list_locations returns a locations_pb2.ListLocationsResponse on success."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request_init = {'name': 'projects/sample1'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = locations_pb2.ListLocationsResponse()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        response = client.list_locations(request)

        # Establish that the response is the type that we expect.
        assert isinstance(response, locations_pb2.ListLocationsResponse)

def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest):
    """Verify cancel_operation surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = OracleDatabaseClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.cancel_operation(request)
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = OracleDatabaseClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From f02303503813d3e1561ac249a917c8e6014287aa Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 23 Sep 2024 16:23:57 +0000 Subject: [PATCH 4/9] Owl Bot copied code from https://github.com/googleapis/googleapis-gen/commit/09d68f35365c74ad276cea3e7c26553a1485faa0 From 544acb3865005c3011f9687e236f55ade5ded114 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 23 Sep 2024 16:24:28 +0000 Subject: [PATCH 5/9] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../v1/MANIFEST.in | 2 - .../google-cloud-oracledatabase/v1/README.rst | 49 - .../v1/docs/_static/custom.css | 3 - .../v1/docs/index.rst | 7 - .../google/cloud/oracledatabase/__init__.py | 161 - 
.../cloud/oracledatabase_v1/__init__.py | 162 - .../cloud/oracledatabase_v1/types/__init__.py | 182 - .../google-cloud-oracledatabase/v1/noxfile.py | 278 - .../google-cloud-oracledatabase}/.coveragerc | 0 .../google-cloud-oracledatabase}/.flake8 | 4 +- .../google-cloud-oracledatabase/.gitignore | 63 + .../google-cloud-oracledatabase/CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../CONTRIBUTING.rst | 271 + packages/google-cloud-oracledatabase/LICENSE | 202 + .../google-cloud-oracledatabase/MANIFEST.in | 25 + .../google-cloud-oracledatabase/README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../google-cloud-oracledatabase}/docs/conf.py | 74 +- .../docs/index.rst | 28 + .../docs/multiprocessing.rst | 7 + .../oracledatabase_v1/oracle_database.rst | 0 .../docs/oracledatabase_v1/services_.rst | 0 .../docs/oracledatabase_v1/types_.rst | 0 .../docs/summary_overview.md | 22 + .../google/cloud/oracledatabase/__init__.py | 177 + .../cloud/oracledatabase/gapic_version.py | 0 .../google/cloud/oracledatabase/py.typed | 0 .../cloud/oracledatabase_v1/__init__.py | 168 + .../oracledatabase_v1/gapic_metadata.json | 0 .../cloud/oracledatabase_v1/gapic_version.py | 0 .../google/cloud/oracledatabase_v1/py.typed | 0 .../oracledatabase_v1/services/__init__.py | 0 .../services/oracle_database/__init__.py | 4 +- .../services/oracle_database/client.py | 1346 ++-- .../services/oracle_database/pagers.py | 366 +- .../oracle_database/transports/__init__.py | 12 +- .../oracle_database/transports/base.py | 430 +- .../oracle_database/transports/rest.py | 2530 +++++--- .../cloud/oracledatabase_v1/types/__init__.py | 157 + .../types/autonomous_database.py | 85 +- .../autonomous_database_character_set.py | 6 +- .../types/autonomous_db_backup.py | 15 +- .../types/autonomous_db_version.py | 5 +- .../cloud/oracledatabase_v1/types/common.py | 5 +- .../cloud/oracledatabase_v1/types/db_node.py | 12 +- 
.../oracledatabase_v1/types/db_server.py | 12 +- .../types/db_system_shape.py | 5 +- .../oracledatabase_v1/types/entitlement.py | 12 +- .../oracledatabase_v1/types/exadata_infra.py | 23 +- .../oracledatabase_v1/types/gi_version.py | 5 +- .../types/location_metadata.py | 5 +- .../oracledatabase_v1/types/oracledatabase.py | 135 +- .../oracledatabase_v1/types/vm_cluster.py | 21 +- .../google-cloud-oracledatabase}/mypy.ini | 0 .../google-cloud-oracledatabase/noxfile.py | 452 ++ ...atabase_create_autonomous_database_sync.py | 0 ...reate_cloud_exadata_infrastructure_sync.py | 0 ...e_database_create_cloud_vm_cluster_sync.py | 0 ...atabase_delete_autonomous_database_sync.py | 0 ...elete_cloud_exadata_infrastructure_sync.py | 0 ...e_database_delete_cloud_vm_cluster_sync.py | 0 ...enerate_autonomous_database_wallet_sync.py | 0 ...e_database_get_autonomous_database_sync.py | 0 ...e_get_cloud_exadata_infrastructure_sync.py | 0 ...acle_database_get_cloud_vm_cluster_sync.py | 0 ...e_list_autonomous_database_backups_sync.py | 0 ...autonomous_database_character_sets_sync.py | 0 ...database_list_autonomous_databases_sync.py | 0 ...tabase_list_autonomous_db_versions_sync.py | 0 ...list_cloud_exadata_infrastructures_sync.py | 0 ...le_database_list_cloud_vm_clusters_sync.py | 0 ...ated_oracle_database_list_db_nodes_sync.py | 0 ...ed_oracle_database_list_db_servers_sync.py | 0 ...cle_database_list_db_system_shapes_sync.py | 0 ..._oracle_database_list_entitlements_sync.py | 0 ...d_oracle_database_list_gi_versions_sync.py | 0 ...tabase_restore_autonomous_database_sync.py | 0 ...tadata_google.cloud.oracledatabase.v1.json | 0 .../scripts/decrypt-secrets.sh | 46 + .../fixup_oracledatabase_v1_keywords.py | 0 .../google-cloud-oracledatabase}/setup.py | 10 +- .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 0 .../testing/constraints-3.11.txt | 0 .../testing/constraints-3.12.txt | 0 .../testing/constraints-3.7.txt | 0 .../testing/constraints-3.8.txt | 0 
.../testing/constraints-3.9.txt | 0 .../tests/__init__.py | 1 - .../tests/unit/__init__.py | 1 - .../tests/unit/gapic}/__init__.py | 1 - .../unit/gapic/oracledatabase_v1}/__init__.py | 1 - .../oracledatabase_v1/test_oracle_database.py | 5765 +++++++++++------ 97 files changed, 8910 insertions(+), 4722 deletions(-) delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/README.rst delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/.coveragerc (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/.flake8 (92%) create mode 100644 packages/google-cloud-oracledatabase/.gitignore create mode 100644 packages/google-cloud-oracledatabase/CHANGELOG.md create mode 100644 packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-oracledatabase/CONTRIBUTING.rst create mode 100644 packages/google-cloud-oracledatabase/LICENSE create mode 100644 packages/google-cloud-oracledatabase/MANIFEST.in create mode 100644 packages/google-cloud-oracledatabase/README.rst create mode 120000 packages/google-cloud-oracledatabase/docs/CHANGELOG.md create mode 120000 packages/google-cloud-oracledatabase/docs/README.rst create mode 100644 packages/google-cloud-oracledatabase/docs/_static/custom.css create mode 
100644 packages/google-cloud-oracledatabase/docs/_templates/layout.html rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/docs/conf.py (88%) create mode 100644 packages/google-cloud-oracledatabase/docs/index.rst create mode 100644 packages/google-cloud-oracledatabase/docs/multiprocessing.rst rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/docs/oracledatabase_v1/oracle_database.rst (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/docs/oracledatabase_v1/services_.rst (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/docs/oracledatabase_v1/types_.rst (100%) create mode 100644 packages/google-cloud-oracledatabase/docs/summary_overview.md create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase/py.typed (100%) create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/gapic_metadata.json (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/py.typed (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/__init__.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => 
packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py (93%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/client.py (79%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py (76%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py (76%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py (68%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py (60%) create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/autonomous_database.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py (97%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/autonomous_db_version.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/common.py (94%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => 
packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/db_node.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/db_server.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/db_system_shape.py (97%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/entitlement.py (94%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/exadata_infra.py (97%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/gi_version.py (95%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/location_metadata.py (94%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/oracledatabase.py (92%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/vm_cluster.py (97%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/mypy.ini (100%) create mode 100644 packages/google-cloud-oracledatabase/noxfile.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => 
packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => 
packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py (100%) rename 
{owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json (100%) create mode 100755 packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/scripts/fixup_oracledatabase_v1_keywords.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/setup.py (93%) create mode 100644 packages/google-cloud-oracledatabase/testing/.gitignore rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.10.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.11.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.12.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.7.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.8.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.9.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/tests/__init__.py (99%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/tests/unit/__init__.py (99%) rename {owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1 => packages/google-cloud-oracledatabase/tests/unit/gapic}/__init__.py (99%) 
rename {owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic => packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1}/__init__.py (99%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py (63%) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in b/owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in deleted file mode 100644 index fb8faa261eae..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/oracledatabase *.py -recursive-include google/cloud/oracledatabase_v1 *.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/README.rst b/owl-bot-staging/google-cloud-oracledatabase/v1/README.rst deleted file mode 100644 index 33823b82e94f..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Oracledatabase API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Oracledatabase API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. 
- -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst deleted file mode 100644 index 8b7bea7d55e1..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - oracledatabase_v1/services - oracledatabase_v1/types diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py deleted file mode 100644 index 59a90fe071f9..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py +++ /dev/null @@ -1,161 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.oracledatabase import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.oracledatabase_v1.services.oracle_database.client import OracleDatabaseClient - -from google.cloud.oracledatabase_v1.types.autonomous_database import AllConnectionStrings -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabase -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseApex -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseConnectionStrings -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseConnectionUrls -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseProperties -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseStandbySummary -from google.cloud.oracledatabase_v1.types.autonomous_database import DatabaseConnectionStringProfile -from google.cloud.oracledatabase_v1.types.autonomous_database import ScheduledOperationDetails -from google.cloud.oracledatabase_v1.types.autonomous_database import DBWorkload -from google.cloud.oracledatabase_v1.types.autonomous_database import GenerateType -from google.cloud.oracledatabase_v1.types.autonomous_database import OperationsInsightsState -from google.cloud.oracledatabase_v1.types.autonomous_database import State -from google.cloud.oracledatabase_v1.types.autonomous_database_character_set import AutonomousDatabaseCharacterSet -from 
google.cloud.oracledatabase_v1.types.autonomous_db_backup import AutonomousDatabaseBackup -from google.cloud.oracledatabase_v1.types.autonomous_db_backup import AutonomousDatabaseBackupProperties -from google.cloud.oracledatabase_v1.types.autonomous_db_version import AutonomousDbVersion -from google.cloud.oracledatabase_v1.types.common import CustomerContact -from google.cloud.oracledatabase_v1.types.db_node import DbNode -from google.cloud.oracledatabase_v1.types.db_node import DbNodeProperties -from google.cloud.oracledatabase_v1.types.db_server import DbServer -from google.cloud.oracledatabase_v1.types.db_server import DbServerProperties -from google.cloud.oracledatabase_v1.types.db_system_shape import DbSystemShape -from google.cloud.oracledatabase_v1.types.entitlement import CloudAccountDetails -from google.cloud.oracledatabase_v1.types.entitlement import Entitlement -from google.cloud.oracledatabase_v1.types.exadata_infra import CloudExadataInfrastructure -from google.cloud.oracledatabase_v1.types.exadata_infra import CloudExadataInfrastructureProperties -from google.cloud.oracledatabase_v1.types.exadata_infra import MaintenanceWindow -from google.cloud.oracledatabase_v1.types.gi_version import GiVersion -from google.cloud.oracledatabase_v1.types.location_metadata import LocationMetadata -from google.cloud.oracledatabase_v1.types.oracledatabase import CreateAutonomousDatabaseRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import CreateCloudExadataInfrastructureRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import CreateCloudVmClusterRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import DeleteAutonomousDatabaseRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import DeleteCloudExadataInfrastructureRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import DeleteCloudVmClusterRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import 
GenerateAutonomousDatabaseWalletRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import GenerateAutonomousDatabaseWalletResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import GetAutonomousDatabaseRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import GetCloudExadataInfrastructureRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import GetCloudVmClusterRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseBackupsRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseBackupsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseCharacterSetsRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseCharacterSetsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabasesRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabasesResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDbVersionsRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDbVersionsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudExadataInfrastructuresRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudExadataInfrastructuresResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudVmClustersRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudVmClustersResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbNodesRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbNodesResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbServersRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbServersResponse -from 
google.cloud.oracledatabase_v1.types.oracledatabase import ListDbSystemShapesRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbSystemShapesResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListEntitlementsRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListEntitlementsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListGiVersionsRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListGiVersionsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import OperationMetadata -from google.cloud.oracledatabase_v1.types.oracledatabase import RestoreAutonomousDatabaseRequest -from google.cloud.oracledatabase_v1.types.vm_cluster import CloudVmCluster -from google.cloud.oracledatabase_v1.types.vm_cluster import CloudVmClusterProperties -from google.cloud.oracledatabase_v1.types.vm_cluster import DataCollectionOptions - -__all__ = ('OracleDatabaseClient', - 'AllConnectionStrings', - 'AutonomousDatabase', - 'AutonomousDatabaseApex', - 'AutonomousDatabaseConnectionStrings', - 'AutonomousDatabaseConnectionUrls', - 'AutonomousDatabaseProperties', - 'AutonomousDatabaseStandbySummary', - 'DatabaseConnectionStringProfile', - 'ScheduledOperationDetails', - 'DBWorkload', - 'GenerateType', - 'OperationsInsightsState', - 'State', - 'AutonomousDatabaseCharacterSet', - 'AutonomousDatabaseBackup', - 'AutonomousDatabaseBackupProperties', - 'AutonomousDbVersion', - 'CustomerContact', - 'DbNode', - 'DbNodeProperties', - 'DbServer', - 'DbServerProperties', - 'DbSystemShape', - 'CloudAccountDetails', - 'Entitlement', - 'CloudExadataInfrastructure', - 'CloudExadataInfrastructureProperties', - 'MaintenanceWindow', - 'GiVersion', - 'LocationMetadata', - 'CreateAutonomousDatabaseRequest', - 'CreateCloudExadataInfrastructureRequest', - 'CreateCloudVmClusterRequest', - 'DeleteAutonomousDatabaseRequest', - 'DeleteCloudExadataInfrastructureRequest', - 
'DeleteCloudVmClusterRequest', - 'GenerateAutonomousDatabaseWalletRequest', - 'GenerateAutonomousDatabaseWalletResponse', - 'GetAutonomousDatabaseRequest', - 'GetCloudExadataInfrastructureRequest', - 'GetCloudVmClusterRequest', - 'ListAutonomousDatabaseBackupsRequest', - 'ListAutonomousDatabaseBackupsResponse', - 'ListAutonomousDatabaseCharacterSetsRequest', - 'ListAutonomousDatabaseCharacterSetsResponse', - 'ListAutonomousDatabasesRequest', - 'ListAutonomousDatabasesResponse', - 'ListAutonomousDbVersionsRequest', - 'ListAutonomousDbVersionsResponse', - 'ListCloudExadataInfrastructuresRequest', - 'ListCloudExadataInfrastructuresResponse', - 'ListCloudVmClustersRequest', - 'ListCloudVmClustersResponse', - 'ListDbNodesRequest', - 'ListDbNodesResponse', - 'ListDbServersRequest', - 'ListDbServersResponse', - 'ListDbSystemShapesRequest', - 'ListDbSystemShapesResponse', - 'ListEntitlementsRequest', - 'ListEntitlementsResponse', - 'ListGiVersionsRequest', - 'ListGiVersionsResponse', - 'OperationMetadata', - 'RestoreAutonomousDatabaseRequest', - 'CloudVmCluster', - 'CloudVmClusterProperties', - 'DataCollectionOptions', -) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py deleted file mode 100644 index 8b3946e07399..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py +++ /dev/null @@ -1,162 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.oracledatabase_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.oracle_database import OracleDatabaseClient - -from .types.autonomous_database import AllConnectionStrings -from .types.autonomous_database import AutonomousDatabase -from .types.autonomous_database import AutonomousDatabaseApex -from .types.autonomous_database import AutonomousDatabaseConnectionStrings -from .types.autonomous_database import AutonomousDatabaseConnectionUrls -from .types.autonomous_database import AutonomousDatabaseProperties -from .types.autonomous_database import AutonomousDatabaseStandbySummary -from .types.autonomous_database import DatabaseConnectionStringProfile -from .types.autonomous_database import ScheduledOperationDetails -from .types.autonomous_database import DBWorkload -from .types.autonomous_database import GenerateType -from .types.autonomous_database import OperationsInsightsState -from .types.autonomous_database import State -from .types.autonomous_database_character_set import AutonomousDatabaseCharacterSet -from .types.autonomous_db_backup import AutonomousDatabaseBackup -from .types.autonomous_db_backup import AutonomousDatabaseBackupProperties -from .types.autonomous_db_version import AutonomousDbVersion -from .types.common import CustomerContact -from .types.db_node import DbNode -from .types.db_node import DbNodeProperties -from .types.db_server import DbServer -from .types.db_server import DbServerProperties -from .types.db_system_shape import DbSystemShape -from 
.types.entitlement import CloudAccountDetails -from .types.entitlement import Entitlement -from .types.exadata_infra import CloudExadataInfrastructure -from .types.exadata_infra import CloudExadataInfrastructureProperties -from .types.exadata_infra import MaintenanceWindow -from .types.gi_version import GiVersion -from .types.location_metadata import LocationMetadata -from .types.oracledatabase import CreateAutonomousDatabaseRequest -from .types.oracledatabase import CreateCloudExadataInfrastructureRequest -from .types.oracledatabase import CreateCloudVmClusterRequest -from .types.oracledatabase import DeleteAutonomousDatabaseRequest -from .types.oracledatabase import DeleteCloudExadataInfrastructureRequest -from .types.oracledatabase import DeleteCloudVmClusterRequest -from .types.oracledatabase import GenerateAutonomousDatabaseWalletRequest -from .types.oracledatabase import GenerateAutonomousDatabaseWalletResponse -from .types.oracledatabase import GetAutonomousDatabaseRequest -from .types.oracledatabase import GetCloudExadataInfrastructureRequest -from .types.oracledatabase import GetCloudVmClusterRequest -from .types.oracledatabase import ListAutonomousDatabaseBackupsRequest -from .types.oracledatabase import ListAutonomousDatabaseBackupsResponse -from .types.oracledatabase import ListAutonomousDatabaseCharacterSetsRequest -from .types.oracledatabase import ListAutonomousDatabaseCharacterSetsResponse -from .types.oracledatabase import ListAutonomousDatabasesRequest -from .types.oracledatabase import ListAutonomousDatabasesResponse -from .types.oracledatabase import ListAutonomousDbVersionsRequest -from .types.oracledatabase import ListAutonomousDbVersionsResponse -from .types.oracledatabase import ListCloudExadataInfrastructuresRequest -from .types.oracledatabase import ListCloudExadataInfrastructuresResponse -from .types.oracledatabase import ListCloudVmClustersRequest -from .types.oracledatabase import ListCloudVmClustersResponse -from .types.oracledatabase 
import ListDbNodesRequest -from .types.oracledatabase import ListDbNodesResponse -from .types.oracledatabase import ListDbServersRequest -from .types.oracledatabase import ListDbServersResponse -from .types.oracledatabase import ListDbSystemShapesRequest -from .types.oracledatabase import ListDbSystemShapesResponse -from .types.oracledatabase import ListEntitlementsRequest -from .types.oracledatabase import ListEntitlementsResponse -from .types.oracledatabase import ListGiVersionsRequest -from .types.oracledatabase import ListGiVersionsResponse -from .types.oracledatabase import OperationMetadata -from .types.oracledatabase import RestoreAutonomousDatabaseRequest -from .types.vm_cluster import CloudVmCluster -from .types.vm_cluster import CloudVmClusterProperties -from .types.vm_cluster import DataCollectionOptions - -__all__ = ( -'AllConnectionStrings', -'AutonomousDatabase', -'AutonomousDatabaseApex', -'AutonomousDatabaseBackup', -'AutonomousDatabaseBackupProperties', -'AutonomousDatabaseCharacterSet', -'AutonomousDatabaseConnectionStrings', -'AutonomousDatabaseConnectionUrls', -'AutonomousDatabaseProperties', -'AutonomousDatabaseStandbySummary', -'AutonomousDbVersion', -'CloudAccountDetails', -'CloudExadataInfrastructure', -'CloudExadataInfrastructureProperties', -'CloudVmCluster', -'CloudVmClusterProperties', -'CreateAutonomousDatabaseRequest', -'CreateCloudExadataInfrastructureRequest', -'CreateCloudVmClusterRequest', -'CustomerContact', -'DBWorkload', -'DataCollectionOptions', -'DatabaseConnectionStringProfile', -'DbNode', -'DbNodeProperties', -'DbServer', -'DbServerProperties', -'DbSystemShape', -'DeleteAutonomousDatabaseRequest', -'DeleteCloudExadataInfrastructureRequest', -'DeleteCloudVmClusterRequest', -'Entitlement', -'GenerateAutonomousDatabaseWalletRequest', -'GenerateAutonomousDatabaseWalletResponse', -'GenerateType', -'GetAutonomousDatabaseRequest', -'GetCloudExadataInfrastructureRequest', -'GetCloudVmClusterRequest', -'GiVersion', 
-'ListAutonomousDatabaseBackupsRequest', -'ListAutonomousDatabaseBackupsResponse', -'ListAutonomousDatabaseCharacterSetsRequest', -'ListAutonomousDatabaseCharacterSetsResponse', -'ListAutonomousDatabasesRequest', -'ListAutonomousDatabasesResponse', -'ListAutonomousDbVersionsRequest', -'ListAutonomousDbVersionsResponse', -'ListCloudExadataInfrastructuresRequest', -'ListCloudExadataInfrastructuresResponse', -'ListCloudVmClustersRequest', -'ListCloudVmClustersResponse', -'ListDbNodesRequest', -'ListDbNodesResponse', -'ListDbServersRequest', -'ListDbServersResponse', -'ListDbSystemShapesRequest', -'ListDbSystemShapesResponse', -'ListEntitlementsRequest', -'ListEntitlementsResponse', -'ListGiVersionsRequest', -'ListGiVersionsResponse', -'LocationMetadata', -'MaintenanceWindow', -'OperationMetadata', -'OperationsInsightsState', -'OracleDatabaseClient', -'RestoreAutonomousDatabaseRequest', -'ScheduledOperationDetails', -'State', -) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py deleted file mode 100644 index 186fe76fa7d5..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py +++ /dev/null @@ -1,182 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .autonomous_database import ( - AllConnectionStrings, - AutonomousDatabase, - AutonomousDatabaseApex, - AutonomousDatabaseConnectionStrings, - AutonomousDatabaseConnectionUrls, - AutonomousDatabaseProperties, - AutonomousDatabaseStandbySummary, - DatabaseConnectionStringProfile, - ScheduledOperationDetails, - DBWorkload, - GenerateType, - OperationsInsightsState, - State, -) -from .autonomous_database_character_set import ( - AutonomousDatabaseCharacterSet, -) -from .autonomous_db_backup import ( - AutonomousDatabaseBackup, - AutonomousDatabaseBackupProperties, -) -from .autonomous_db_version import ( - AutonomousDbVersion, -) -from .common import ( - CustomerContact, -) -from .db_node import ( - DbNode, - DbNodeProperties, -) -from .db_server import ( - DbServer, - DbServerProperties, -) -from .db_system_shape import ( - DbSystemShape, -) -from .entitlement import ( - CloudAccountDetails, - Entitlement, -) -from .exadata_infra import ( - CloudExadataInfrastructure, - CloudExadataInfrastructureProperties, - MaintenanceWindow, -) -from .gi_version import ( - GiVersion, -) -from .location_metadata import ( - LocationMetadata, -) -from .oracledatabase import ( - CreateAutonomousDatabaseRequest, - CreateCloudExadataInfrastructureRequest, - CreateCloudVmClusterRequest, - DeleteAutonomousDatabaseRequest, - DeleteCloudExadataInfrastructureRequest, - DeleteCloudVmClusterRequest, - GenerateAutonomousDatabaseWalletRequest, - GenerateAutonomousDatabaseWalletResponse, - GetAutonomousDatabaseRequest, - GetCloudExadataInfrastructureRequest, - GetCloudVmClusterRequest, - ListAutonomousDatabaseBackupsRequest, - ListAutonomousDatabaseBackupsResponse, - ListAutonomousDatabaseCharacterSetsRequest, - ListAutonomousDatabaseCharacterSetsResponse, - ListAutonomousDatabasesRequest, - ListAutonomousDatabasesResponse, - ListAutonomousDbVersionsRequest, - ListAutonomousDbVersionsResponse, - ListCloudExadataInfrastructuresRequest, - ListCloudExadataInfrastructuresResponse, - 
ListCloudVmClustersRequest, - ListCloudVmClustersResponse, - ListDbNodesRequest, - ListDbNodesResponse, - ListDbServersRequest, - ListDbServersResponse, - ListDbSystemShapesRequest, - ListDbSystemShapesResponse, - ListEntitlementsRequest, - ListEntitlementsResponse, - ListGiVersionsRequest, - ListGiVersionsResponse, - OperationMetadata, - RestoreAutonomousDatabaseRequest, -) -from .vm_cluster import ( - CloudVmCluster, - CloudVmClusterProperties, - DataCollectionOptions, -) - -__all__ = ( - 'AllConnectionStrings', - 'AutonomousDatabase', - 'AutonomousDatabaseApex', - 'AutonomousDatabaseConnectionStrings', - 'AutonomousDatabaseConnectionUrls', - 'AutonomousDatabaseProperties', - 'AutonomousDatabaseStandbySummary', - 'DatabaseConnectionStringProfile', - 'ScheduledOperationDetails', - 'DBWorkload', - 'GenerateType', - 'OperationsInsightsState', - 'State', - 'AutonomousDatabaseCharacterSet', - 'AutonomousDatabaseBackup', - 'AutonomousDatabaseBackupProperties', - 'AutonomousDbVersion', - 'CustomerContact', - 'DbNode', - 'DbNodeProperties', - 'DbServer', - 'DbServerProperties', - 'DbSystemShape', - 'CloudAccountDetails', - 'Entitlement', - 'CloudExadataInfrastructure', - 'CloudExadataInfrastructureProperties', - 'MaintenanceWindow', - 'GiVersion', - 'LocationMetadata', - 'CreateAutonomousDatabaseRequest', - 'CreateCloudExadataInfrastructureRequest', - 'CreateCloudVmClusterRequest', - 'DeleteAutonomousDatabaseRequest', - 'DeleteCloudExadataInfrastructureRequest', - 'DeleteCloudVmClusterRequest', - 'GenerateAutonomousDatabaseWalletRequest', - 'GenerateAutonomousDatabaseWalletResponse', - 'GetAutonomousDatabaseRequest', - 'GetCloudExadataInfrastructureRequest', - 'GetCloudVmClusterRequest', - 'ListAutonomousDatabaseBackupsRequest', - 'ListAutonomousDatabaseBackupsResponse', - 'ListAutonomousDatabaseCharacterSetsRequest', - 'ListAutonomousDatabaseCharacterSetsResponse', - 'ListAutonomousDatabasesRequest', - 'ListAutonomousDatabasesResponse', - 
'ListAutonomousDbVersionsRequest', - 'ListAutonomousDbVersionsResponse', - 'ListCloudExadataInfrastructuresRequest', - 'ListCloudExadataInfrastructuresResponse', - 'ListCloudVmClustersRequest', - 'ListCloudVmClustersResponse', - 'ListDbNodesRequest', - 'ListDbNodesResponse', - 'ListDbServersRequest', - 'ListDbServersResponse', - 'ListDbSystemShapesRequest', - 'ListDbSystemShapesResponse', - 'ListEntitlementsRequest', - 'ListEntitlementsResponse', - 'ListGiVersionsRequest', - 'ListGiVersionsResponse', - 'OperationMetadata', - 'RestoreAutonomousDatabaseRequest', - 'CloudVmCluster', - 'CloudVmClusterProperties', - 'DataCollectionOptions', -) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py b/owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py deleted file mode 100644 index f7f7ff3c7bf6..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py +++ /dev/null @@ -1,278 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12" -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-oracledatabase' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/oracledatabase_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/oracledatabase_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/.coveragerc b/packages/google-cloud-oracledatabase/.coveragerc similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/.coveragerc rename to packages/google-cloud-oracledatabase/.coveragerc diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 b/packages/google-cloud-oracledatabase/.flake8 similarity index 92% rename from owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 rename to packages/google-cloud-oracledatabase/.flake8 index 29227d4cf419..87f6e408c47d 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 +++ b/packages/google-cloud-oracledatabase/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. 
**/proto/** diff --git a/packages/google-cloud-oracledatabase/.gitignore b/packages/google-cloud-oracledatabase/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-oracledatabase/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-oracledatabase/CHANGELOG.md b/packages/google-cloud-oracledatabase/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-cloud-oracledatabase/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md b/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/CONTRIBUTING.rst b/packages/google-cloud-oracledatabase/CONTRIBUTING.rst new file mode 100644 index 000000000000..9b24d1115e7f --- /dev/null +++ b/packages/google-cloud-oracledatabase/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-oracledatabase + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-oracledatabase/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-cloud-oracledatabase/LICENSE b/packages/google-cloud-oracledatabase/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-oracledatabase/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-oracledatabase/MANIFEST.in b/packages/google-cloud-oracledatabase/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-oracledatabase/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-oracledatabase/README.rst b/packages/google-cloud-oracledatabase/README.rst new file mode 100644 index 000000000000..3d0109a492fc --- /dev/null +++ b/packages/google-cloud-oracledatabase/README.rst @@ -0,0 +1,108 @@ +Python Client for +================== + +|preview| |pypi| |versions| + +``_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-oracledatabase.svg + :target: https://pypi.org/project/google-cloud-oracledatabase/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-oracledatabase.svg + :target: https://pypi.org/project/google-cloud-oracledatabase/ +.. _: +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_overview +.. _Product Documentation: + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the .`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the .: +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-oracledatabase + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-oracledatabase + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for + to see other available methods on the client. +- Read the ` Product documentation`_ to learn + more about the product and see How-to Guides. 
+- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _ Product documentation: +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-oracledatabase/docs/CHANGELOG.md b/packages/google-cloud-oracledatabase/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/docs/README.rst b/packages/google-cloud-oracledatabase/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/docs/_static/custom.css b/packages/google-cloud-oracledatabase/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-oracledatabase/docs/_templates/layout.html b/packages/google-cloud-oracledatabase/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py b/packages/google-cloud-oracledatabase/docs/conf.py similarity index 88% rename from owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py rename to packages/google-cloud-oracledatabase/docs/conf.py index 5c45cac86fd3..d3d4e9d5a5c4 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py +++ b/packages/google-cloud-oracledatabase/docs/conf.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# # google-cloud-oracledatabase documentation build configuration file # # This file is execfile()d with the current directory set to its @@ -25,21 +24,25 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. 
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +52,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. @@ -78,9 +80,9 @@ root_doc = "index" # General information about the project. -project = u"google-cloud-oracledatabase" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +project = "google-cloud-oracledatabase" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +98,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +108,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +154,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-oracledatabase", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -258,13 +266,13 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', + #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', + #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. - # 'preamble': '', + #'preamble': '', # Latex figure (float) alignment - # 'figure_align': 'htbp', + #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. 
List of tuples @@ -274,7 +282,7 @@ ( root_doc, "google-cloud-oracledatabase.tex", - u"google-cloud-oracledatabase Documentation", + "google-cloud-oracledatabase Documentation", author, "manual", ) @@ -309,7 +317,7 @@ ( root_doc, "google-cloud-oracledatabase", - u"Google Cloud Oracledatabase Documentation", + "google-cloud-oracledatabase Documentation", [author], 1, ) @@ -328,10 +336,10 @@ ( root_doc, "google-cloud-oracledatabase", - u"google-cloud-oracledatabase Documentation", + "google-cloud-oracledatabase Documentation", author, "google-cloud-oracledatabase", - "GAPIC library for Google Cloud Oracledatabase API", + "google-cloud-oracledatabase Library", "APIs", ) ] @@ -351,14 +359,14 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/google-cloud-oracledatabase/docs/index.rst b/packages/google-cloud-oracledatabase/docs/index.rst new file mode 100644 index 000000000000..77ff04e09fa0 --- /dev/null +++ 
b/packages/google-cloud-oracledatabase/docs/index.rst @@ -0,0 +1,28 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + oracledatabase_v1/services_ + oracledatabase_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-oracledatabase`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-oracledatabase/docs/multiprocessing.rst b/packages/google-cloud-oracledatabase/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/oracle_database.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/oracle_database.rst rename to packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/services_.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/services_.rst rename to packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/types_.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/types_.rst rename to packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst diff --git a/packages/google-cloud-oracledatabase/docs/summary_overview.md b/packages/google-cloud-oracledatabase/docs/summary_overview.md new file mode 100644 index 000000000000..02a3fcd382b8 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# API + +Overview of the APIs available for API. + +## All entries + +Classes, methods and properties & attributes for + API. 
+ +[classes](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_property.html) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py new file mode 100644 index 000000000000..29f02c59e323 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.oracledatabase import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.oracledatabase_v1.services.oracle_database.client import ( + OracleDatabaseClient, +) +from google.cloud.oracledatabase_v1.types.autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from google.cloud.oracledatabase_v1.types.autonomous_database_character_set import ( + AutonomousDatabaseCharacterSet, +) +from google.cloud.oracledatabase_v1.types.autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from google.cloud.oracledatabase_v1.types.autonomous_db_version import ( + AutonomousDbVersion, +) +from google.cloud.oracledatabase_v1.types.common import CustomerContact +from google.cloud.oracledatabase_v1.types.db_node import DbNode, DbNodeProperties +from google.cloud.oracledatabase_v1.types.db_server import DbServer, DbServerProperties +from google.cloud.oracledatabase_v1.types.db_system_shape import DbSystemShape +from google.cloud.oracledatabase_v1.types.entitlement import ( + CloudAccountDetails, + Entitlement, +) +from google.cloud.oracledatabase_v1.types.exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from google.cloud.oracledatabase_v1.types.gi_version import GiVersion +from google.cloud.oracledatabase_v1.types.location_metadata import LocationMetadata +from google.cloud.oracledatabase_v1.types.oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + 
DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from google.cloud.oracledatabase_v1.types.vm_cluster import ( + CloudVmCluster, + CloudVmClusterProperties, + DataCollectionOptions, +) + +__all__ = ( + "OracleDatabaseClient", + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "DatabaseConnectionStringProfile", + "ScheduledOperationDetails", + "DBWorkload", + "GenerateType", + "OperationsInsightsState", + "State", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDbVersion", + "CustomerContact", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "CloudAccountDetails", + "Entitlement", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", + "GiVersion", + 
"LocationMetadata", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "OperationMetadata", + "RestoreAutonomousDatabaseRequest", + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/gapic_version.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/py.typed b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed similarity index 100% rename from 
owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/py.typed rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py new file mode 100644 index 000000000000..225fa0bdbb4c --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.oracledatabase_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.oracle_database import OracleDatabaseClient +from .types.autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from .types.autonomous_database_character_set import AutonomousDatabaseCharacterSet +from .types.autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from .types.autonomous_db_version import AutonomousDbVersion +from .types.common import CustomerContact +from .types.db_node import DbNode, DbNodeProperties +from .types.db_server import DbServer, DbServerProperties +from .types.db_system_shape import DbSystemShape +from .types.entitlement import CloudAccountDetails, Entitlement +from .types.exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from .types.gi_version import GiVersion +from .types.location_metadata import LocationMetadata +from .types.oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + 
ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from .types.vm_cluster import ( + CloudVmCluster, + CloudVmClusterProperties, + DataCollectionOptions, +) + +__all__ = ( + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "AutonomousDbVersion", + "CloudAccountDetails", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "CloudVmCluster", + "CloudVmClusterProperties", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "CustomerContact", + "DBWorkload", + "DataCollectionOptions", + "DatabaseConnectionStringProfile", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "Entitlement", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GenerateType", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "GiVersion", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + 
"ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "LocationMetadata", + "MaintenanceWindow", + "OperationMetadata", + "OperationsInsightsState", + "OracleDatabaseClient", + "RestoreAutonomousDatabaseRequest", + "ScheduledOperationDetails", + "State", +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_metadata.json b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_metadata.json rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_version.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/py.typed b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed similarity index 100% rename from 
owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/py.typed rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/__init__.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py similarity index 93% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py index 4b2d17bc5ef2..947b9516b5e7 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py @@ -15,6 +15,4 @@ # from .client import OracleDatabaseClient -__all__ = ( - 'OracleDatabaseClient', -) +__all__ = ("OracleDatabaseClient",) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py similarity index 79% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py rename to 
packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py index f1332e871961..9a4182820e59 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py @@ -16,20 +16,32 @@ from collections import OrderedDict import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings -from google.cloud.oracledatabase_v1 import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.oracledatabase_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -38,25 +50,31 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import 
locations_pb2 # type: ignore -from google.cloud.oracledatabase_v1.services.oracle_database import pagers -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database as gco_autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database_character_set -from google.cloud.oracledatabase_v1.types import autonomous_db_backup -from google.cloud.oracledatabase_v1.types import autonomous_db_version -from google.cloud.oracledatabase_v1.types import db_node -from google.cloud.oracledatabase_v1.types import db_server -from google.cloud.oracledatabase_v1.types import db_system_shape -from google.cloud.oracledatabase_v1.types import entitlement -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import gi_version -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster -from google.longrunning import operations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import OracleDatabaseTransport, DEFAULT_CLIENT_INFO + +from google.cloud.oracledatabase_v1.services.oracle_database import pagers +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) +from google.cloud.oracledatabase_v1.types import autonomous_database + +from .transports.base import DEFAULT_CLIENT_INFO, OracleDatabaseTransport from .transports.rest import 
OracleDatabaseRestTransport @@ -67,12 +85,16 @@ class OracleDatabaseClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ - _transport_registry = OrderedDict() # type: Dict[str, Type[OracleDatabaseTransport]] + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[OracleDatabaseTransport]] _transport_registry["rest"] = OracleDatabaseRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[OracleDatabaseTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[OracleDatabaseTransport]: """Returns an appropriate transport class. Args: @@ -164,8 +186,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: OracleDatabaseClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -182,194 +203,353 @@ def transport(self) -> OracleDatabaseTransport: return self._transport @staticmethod - def autonomous_database_path(project: str,location: str,autonomous_database: str,) -> str: + def autonomous_database_path( + project: str, + location: str, + autonomous_database: str, + ) -> str: """Returns a fully-qualified autonomous_database string.""" - return "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format(project=project, location=location, autonomous_database=autonomous_database, ) + return "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format( + project=project, + location=location, + autonomous_database=autonomous_database, + ) @staticmethod - def parse_autonomous_database_path(path: str) -> Dict[str,str]: + def parse_autonomous_database_path(path: str) -> Dict[str, str]: """Parses a autonomous_database path into its component segments.""" - m = 
re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabases/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabases/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def autonomous_database_backup_path(project: str,location: str,autonomous_database_backup: str,) -> str: + def autonomous_database_backup_path( + project: str, + location: str, + autonomous_database_backup: str, + ) -> str: """Returns a fully-qualified autonomous_database_backup string.""" - return "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format(project=project, location=location, autonomous_database_backup=autonomous_database_backup, ) + return "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format( + project=project, + location=location, + autonomous_database_backup=autonomous_database_backup, + ) @staticmethod - def parse_autonomous_database_backup_path(path: str) -> Dict[str,str]: + def parse_autonomous_database_backup_path(path: str) -> Dict[str, str]: """Parses a autonomous_database_backup path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseBackups/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseBackups/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def autonomous_database_character_set_path(project: str,location: str,autonomous_database_character_set: str,) -> str: + def autonomous_database_character_set_path( + project: str, + location: str, + autonomous_database_character_set: str, + ) -> str: """Returns a fully-qualified autonomous_database_character_set string.""" - return "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format(project=project, location=location, autonomous_database_character_set=autonomous_database_character_set, ) + 
return "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format( + project=project, + location=location, + autonomous_database_character_set=autonomous_database_character_set, + ) @staticmethod - def parse_autonomous_database_character_set_path(path: str) -> Dict[str,str]: + def parse_autonomous_database_character_set_path(path: str) -> Dict[str, str]: """Parses a autonomous_database_character_set path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseCharacterSets/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseCharacterSets/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def autonomous_db_version_path(project: str,location: str,autonomous_db_version: str,) -> str: + def autonomous_db_version_path( + project: str, + location: str, + autonomous_db_version: str, + ) -> str: """Returns a fully-qualified autonomous_db_version string.""" - return "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format(project=project, location=location, autonomous_db_version=autonomous_db_version, ) + return "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format( + project=project, + location=location, + autonomous_db_version=autonomous_db_version, + ) @staticmethod - def parse_autonomous_db_version_path(path: str) -> Dict[str,str]: + def parse_autonomous_db_version_path(path: str) -> Dict[str, str]: """Parses a autonomous_db_version path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDbVersions/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDbVersions/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def cloud_exadata_infrastructure_path(project: str,location: str,cloud_exadata_infrastructure: str,) -> str: + def 
cloud_exadata_infrastructure_path( + project: str, + location: str, + cloud_exadata_infrastructure: str, + ) -> str: """Returns a fully-qualified cloud_exadata_infrastructure string.""" - return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, ) + return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format( + project=project, + location=location, + cloud_exadata_infrastructure=cloud_exadata_infrastructure, + ) @staticmethod - def parse_cloud_exadata_infrastructure_path(path: str) -> Dict[str,str]: + def parse_cloud_exadata_infrastructure_path(path: str) -> Dict[str, str]: """Parses a cloud_exadata_infrastructure path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def cloud_vm_cluster_path(project: str,location: str,cloud_vm_cluster: str,) -> str: + def cloud_vm_cluster_path( + project: str, + location: str, + cloud_vm_cluster: str, + ) -> str: """Returns a fully-qualified cloud_vm_cluster string.""" - return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, ) + return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + ) @staticmethod - def parse_cloud_vm_cluster_path(path: str) -> Dict[str,str]: + def parse_cloud_vm_cluster_path(path: str) -> Dict[str, str]: """Parses a cloud_vm_cluster path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)$", path) + m 
= re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def db_node_path(project: str,location: str,cloud_vm_cluster: str,db_node: str,) -> str: + def db_node_path( + project: str, + location: str, + cloud_vm_cluster: str, + db_node: str, + ) -> str: """Returns a fully-qualified db_node string.""" - return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, db_node=db_node, ) + return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + db_node=db_node, + ) @staticmethod - def parse_db_node_path(path: str) -> Dict[str,str]: + def parse_db_node_path(path: str) -> Dict[str, str]: """Parses a db_node path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)/dbNodes/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)/dbNodes/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def db_server_path(project: str,location: str,cloud_exadata_infrastructure: str,db_server: str,) -> str: + def db_server_path( + project: str, + location: str, + cloud_exadata_infrastructure: str, + db_server: str, + ) -> str: """Returns a fully-qualified db_server string.""" - return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, db_server=db_server, ) + return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format( + project=project, + location=location, + 
cloud_exadata_infrastructure=cloud_exadata_infrastructure, + db_server=db_server, + ) @staticmethod - def parse_db_server_path(path: str) -> Dict[str,str]: + def parse_db_server_path(path: str) -> Dict[str, str]: """Parses a db_server path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)/dbServers/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)/dbServers/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def db_system_shape_path(project: str,location: str,db_system_shape: str,) -> str: + def db_system_shape_path( + project: str, + location: str, + db_system_shape: str, + ) -> str: """Returns a fully-qualified db_system_shape string.""" - return "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format(project=project, location=location, db_system_shape=db_system_shape, ) + return "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format( + project=project, + location=location, + db_system_shape=db_system_shape, + ) @staticmethod - def parse_db_system_shape_path(path: str) -> Dict[str,str]: + def parse_db_system_shape_path(path: str) -> Dict[str, str]: """Parses a db_system_shape path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dbSystemShapes/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dbSystemShapes/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def entitlement_path(project: str,location: str,entitlement: str,) -> str: + def entitlement_path( + project: str, + location: str, + entitlement: str, + ) -> str: """Returns a fully-qualified entitlement string.""" - return "projects/{project}/locations/{location}/entitlements/{entitlement}".format(project=project, location=location, entitlement=entitlement, ) + return ( + 
"projects/{project}/locations/{location}/entitlements/{entitlement}".format( + project=project, + location=location, + entitlement=entitlement, + ) + ) @staticmethod - def parse_entitlement_path(path: str) -> Dict[str,str]: + def parse_entitlement_path(path: str) -> Dict[str, str]: """Parses a entitlement path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entitlements/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/entitlements/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def gi_version_path(project: str,location: str,gi_version: str,) -> str: + def gi_version_path( + project: str, + location: str, + gi_version: str, + ) -> str: """Returns a fully-qualified gi_version string.""" - return "projects/{project}/locations/{location}/giVersions/{gi_version}".format(project=project, location=location, gi_version=gi_version, ) + return "projects/{project}/locations/{location}/giVersions/{gi_version}".format( + project=project, + location=location, + gi_version=gi_version, + ) @staticmethod - def parse_gi_version_path(path: str) -> Dict[str,str]: + def parse_gi_version_path(path: str) -> Dict[str, str]: """Parses a gi_version path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/giVersions/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/giVersions/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def network_path(project: str,network: str,) -> str: + def network_path( + project: str, + network: str, + ) -> str: """Returns a fully-qualified network string.""" - return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) @staticmethod - def parse_network_path(path: str) -> Dict[str,str]: + def parse_network_path(path: str) -> Dict[str, str]: """Parses a network 
path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = 
re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -401,16 +581,22 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -423,7 +609,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -444,13 +632,19 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert == "true", use_mtls_endpoint, universe_domain_env @staticmethod @@ -473,7 +667,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): """Return the API endpoint used by the client. 
Args: @@ -489,17 +685,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -522,8 +726,9 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ return universe_domain @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: """Returns True iff the universe domains used by the client and credentials match. 
Args: @@ -541,11 +746,13 @@ def _compare_universes(client_universe: str, credentials_universe = getattr(credentials, "universe_domain", default_universe) if client_universe != credentials_universe: - raise ValueError("The configured universe domain " + raise ValueError( + "The configured universe domain " f"({client_universe}) does not match the universe domain " f"found in the credentials ({credentials_universe}). " "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + f"`{default_universe}` is the default." + ) return True def _validate_universe_domain(self): @@ -557,8 +764,12 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - OracleDatabaseClient._compare_universes(self.universe_domain, self.transport._credentials)) + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or OracleDatabaseClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) return self._is_universe_domain_valid @property @@ -579,12 +790,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, OracleDatabaseTransport, Callable[..., OracleDatabaseTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, OracleDatabaseTransport, Callable[..., OracleDatabaseTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the oracle database client. 
Args: @@ -639,21 +854,33 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = OracleDatabaseClient._read_environment_variables() - self._client_cert_source = OracleDatabaseClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = OracleDatabaseClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = OracleDatabaseClient._read_environment_variables() + self._client_cert_source = OracleDatabaseClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = OracleDatabaseClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport @@ -662,8 +889,10 @@ def __init__(self, *, if transport_provided: # transport is a OracleDatabaseTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " @@ -672,20 +901,29 @@ def __init__(self, *, self._transport = cast(OracleDatabaseTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - OracleDatabaseClient._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or OracleDatabaseClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[OracleDatabaseTransport], Callable[..., OracleDatabaseTransport]] = ( + transport_init: Union[ + Type[OracleDatabaseTransport], Callable[..., OracleDatabaseTransport] + ] = ( OracleDatabaseClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OracleDatabaseTransport], transport) @@ -703,14 +941,17 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) - def list_cloud_exadata_infrastructures(self, - request: 
Optional[Union[oracledatabase.ListCloudExadataInfrastructuresRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCloudExadataInfrastructuresPager: + def list_cloud_exadata_infrastructures( + self, + request: Optional[ + Union[oracledatabase.ListCloudExadataInfrastructuresRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudExadataInfrastructuresPager: r"""Lists Exadata Infrastructures in a given project and location. @@ -772,12 +1013,16 @@ def sample_list_cloud_exadata_infrastructures(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, oracledatabase.ListCloudExadataInfrastructuresRequest): + if not isinstance( + request, oracledatabase.ListCloudExadataInfrastructuresRequest + ): request = oracledatabase.ListCloudExadataInfrastructuresRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -786,14 +1031,14 @@ def sample_list_cloud_exadata_infrastructures(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_cloud_exadata_infrastructures] + rpc = self._transport._wrapped_methods[ + self._transport.list_cloud_exadata_infrastructures + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -821,14 +1066,17 @@ def sample_list_cloud_exadata_infrastructures(): # Done; return the response. return response - def get_cloud_exadata_infrastructure(self, - request: Optional[Union[oracledatabase.GetCloudExadataInfrastructureRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> exadata_infra.CloudExadataInfrastructure: + def get_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.GetCloudExadataInfrastructureRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> exadata_infra.CloudExadataInfrastructure: r"""Gets details of a single Exadata Infrastructure. .. code-block:: python @@ -886,8 +1134,10 @@ def sample_get_cloud_exadata_infrastructure(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -900,14 +1150,14 @@ def sample_get_cloud_exadata_infrastructure(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_cloud_exadata_infrastructure] + rpc = self._transport._wrapped_methods[ + self._transport.get_cloud_exadata_infrastructure + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -924,16 +1174,21 @@ def sample_get_cloud_exadata_infrastructure(): # Done; return the response. return response - def create_cloud_exadata_infrastructure(self, - request: Optional[Union[oracledatabase.CreateCloudExadataInfrastructureRequest, dict]] = None, - *, - parent: Optional[str] = None, - cloud_exadata_infrastructure: Optional[exadata_infra.CloudExadataInfrastructure] = None, - cloud_exadata_infrastructure_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.CreateCloudExadataInfrastructureRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + cloud_exadata_infrastructure: Optional[ + exadata_infra.CloudExadataInfrastructure + ] = None, + cloud_exadata_infrastructure_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> 
operation.Operation: r"""Creates a new Exadata Infrastructure in a given project and location. @@ -1014,14 +1269,20 @@ def sample_create_cloud_exadata_infrastructure(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, cloud_exadata_infrastructure, cloud_exadata_infrastructure_id]) + has_flattened_params = any( + [parent, cloud_exadata_infrastructure, cloud_exadata_infrastructure_id] + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, oracledatabase.CreateCloudExadataInfrastructureRequest): + if not isinstance( + request, oracledatabase.CreateCloudExadataInfrastructureRequest + ): request = oracledatabase.CreateCloudExadataInfrastructureRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1030,18 +1291,20 @@ def sample_create_cloud_exadata_infrastructure(): if cloud_exadata_infrastructure is not None: request.cloud_exadata_infrastructure = cloud_exadata_infrastructure if cloud_exadata_infrastructure_id is not None: - request.cloud_exadata_infrastructure_id = cloud_exadata_infrastructure_id + request.cloud_exadata_infrastructure_id = ( + cloud_exadata_infrastructure_id + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.create_cloud_exadata_infrastructure] + rpc = self._transport._wrapped_methods[ + self._transport.create_cloud_exadata_infrastructure + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1066,14 +1329,17 @@ def sample_create_cloud_exadata_infrastructure(): # Done; return the response. return response - def delete_cloud_exadata_infrastructure(self, - request: Optional[Union[oracledatabase.DeleteCloudExadataInfrastructureRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.DeleteCloudExadataInfrastructureRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single Exadata Infrastructure. .. code-block:: python @@ -1144,12 +1410,16 @@ def sample_delete_cloud_exadata_infrastructure(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, oracledatabase.DeleteCloudExadataInfrastructureRequest): + if not isinstance( + request, oracledatabase.DeleteCloudExadataInfrastructureRequest + ): request = oracledatabase.DeleteCloudExadataInfrastructureRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1158,14 +1428,14 @@ def sample_delete_cloud_exadata_infrastructure(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_cloud_exadata_infrastructure] + rpc = self._transport._wrapped_methods[ + self._transport.delete_cloud_exadata_infrastructure + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1190,14 +1460,17 @@ def sample_delete_cloud_exadata_infrastructure(): # Done; return the response. 
return response - def list_cloud_vm_clusters(self, - request: Optional[Union[oracledatabase.ListCloudVmClustersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCloudVmClustersPager: + def list_cloud_vm_clusters( + self, + request: Optional[ + Union[oracledatabase.ListCloudVmClustersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudVmClustersPager: r"""Lists the VM Clusters in a given project and location. @@ -1258,8 +1531,10 @@ def sample_list_cloud_vm_clusters(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1274,12 +1549,10 @@ def sample_list_cloud_vm_clusters(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_cloud_vm_clusters] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1307,14 +1580,15 @@ def sample_list_cloud_vm_clusters(): # Done; return the response. 
return response - def get_cloud_vm_cluster(self, - request: Optional[Union[oracledatabase.GetCloudVmClusterRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> vm_cluster.CloudVmCluster: + def get_cloud_vm_cluster( + self, + request: Optional[Union[oracledatabase.GetCloudVmClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vm_cluster.CloudVmCluster: r"""Gets details of a single VM Cluster. .. code-block:: python @@ -1372,8 +1646,10 @@ def sample_get_cloud_vm_cluster(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1388,12 +1664,10 @@ def sample_get_cloud_vm_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_cloud_vm_cluster] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1410,16 +1684,19 @@ def sample_get_cloud_vm_cluster(): # Done; return the response. 
return response - def create_cloud_vm_cluster(self, - request: Optional[Union[oracledatabase.CreateCloudVmClusterRequest, dict]] = None, - *, - parent: Optional[str] = None, - cloud_vm_cluster: Optional[vm_cluster.CloudVmCluster] = None, - cloud_vm_cluster_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_cloud_vm_cluster( + self, + request: Optional[ + Union[oracledatabase.CreateCloudVmClusterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + cloud_vm_cluster: Optional[vm_cluster.CloudVmCluster] = None, + cloud_vm_cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new VM Cluster in a given project and location. @@ -1506,8 +1783,10 @@ def sample_create_cloud_vm_cluster(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, cloud_vm_cluster, cloud_vm_cluster_id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1526,12 +1805,10 @@ def sample_create_cloud_vm_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_cloud_vm_cluster] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1556,14 +1833,17 @@ def sample_create_cloud_vm_cluster(): # Done; return the response. return response - def delete_cloud_vm_cluster(self, - request: Optional[Union[oracledatabase.DeleteCloudVmClusterRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_cloud_vm_cluster( + self, + request: Optional[ + Union[oracledatabase.DeleteCloudVmClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single VM Cluster. .. code-block:: python @@ -1634,8 +1914,10 @@ def sample_delete_cloud_vm_cluster(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1650,12 +1932,10 @@ def sample_delete_cloud_vm_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_cloud_vm_cluster] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1680,14 +1960,15 @@ def sample_delete_cloud_vm_cluster(): # Done; return the response. return response - def list_entitlements(self, - request: Optional[Union[oracledatabase.ListEntitlementsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntitlementsPager: + def list_entitlements( + self, + request: Optional[Union[oracledatabase.ListEntitlementsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntitlementsPager: r"""Lists the entitlements in a given project. .. code-block:: python @@ -1747,8 +2028,10 @@ def sample_list_entitlements(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1763,12 +2046,10 @@ def sample_list_entitlements(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_entitlements] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1796,14 +2077,15 @@ def sample_list_entitlements(): # Done; return the response. return response - def list_db_servers(self, - request: Optional[Union[oracledatabase.ListDbServersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDbServersPager: + def list_db_servers( + self, + request: Optional[Union[oracledatabase.ListDbServersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbServersPager: r"""Lists the database servers of an Exadata Infrastructure instance. @@ -1864,8 +2146,10 @@ def sample_list_db_servers(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1880,12 +2164,10 @@ def sample_list_db_servers(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_db_servers] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1913,14 +2195,15 @@ def sample_list_db_servers(): # Done; return the response. return response - def list_db_nodes(self, - request: Optional[Union[oracledatabase.ListDbNodesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDbNodesPager: + def list_db_nodes( + self, + request: Optional[Union[oracledatabase.ListDbNodesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbNodesPager: r"""Lists the database nodes of a VM Cluster. .. code-block:: python @@ -1980,8 +2263,10 @@ def sample_list_db_nodes(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1996,12 +2281,10 @@ def sample_list_db_nodes(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_db_nodes] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2029,14 +2312,15 @@ def sample_list_db_nodes(): # Done; return the response. return response - def list_gi_versions(self, - request: Optional[Union[oracledatabase.ListGiVersionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListGiVersionsPager: + def list_gi_versions( + self, + request: Optional[Union[oracledatabase.ListGiVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGiVersionsPager: r"""Lists all the valid Oracle Grid Infrastructure (GI) versions for the given project and location. @@ -2098,8 +2382,10 @@ def sample_list_gi_versions(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2114,12 +2400,10 @@ def sample_list_gi_versions(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_gi_versions] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2147,14 +2431,15 @@ def sample_list_gi_versions(): # Done; return the response. return response - def list_db_system_shapes(self, - request: Optional[Union[oracledatabase.ListDbSystemShapesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDbSystemShapesPager: + def list_db_system_shapes( + self, + request: Optional[Union[oracledatabase.ListDbSystemShapesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbSystemShapesPager: r"""Lists the database system shapes available for the project and location. @@ -2216,8 +2501,10 @@ def sample_list_db_system_shapes(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2232,12 +2519,10 @@ def sample_list_db_system_shapes(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_db_system_shapes] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2265,14 +2550,17 @@ def sample_list_db_system_shapes(): # Done; return the response. return response - def list_autonomous_databases(self, - request: Optional[Union[oracledatabase.ListAutonomousDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutonomousDatabasesPager: + def list_autonomous_databases( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabasesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabasesPager: r"""Lists the Autonomous Databases in a given project and location. @@ -2334,8 +2622,10 @@ def sample_list_autonomous_databases(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2348,14 +2638,14 @@ def sample_list_autonomous_databases(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_autonomous_databases] + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_databases + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2383,14 +2673,17 @@ def sample_list_autonomous_databases(): # Done; return the response. return response - def get_autonomous_database(self, - request: Optional[Union[oracledatabase.GetAutonomousDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> autonomous_database.AutonomousDatabase: + def get_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.GetAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autonomous_database.AutonomousDatabase: r"""Gets the details of a single Autonomous Database. .. code-block:: python @@ -2448,8 +2741,10 @@ def sample_get_autonomous_database(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2464,12 +2759,10 @@ def sample_get_autonomous_database(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_autonomous_database] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2486,16 +2779,21 @@ def sample_get_autonomous_database(): # Done; return the response. return response - def create_autonomous_database(self, - request: Optional[Union[oracledatabase.CreateAutonomousDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - autonomous_database: Optional[gco_autonomous_database.AutonomousDatabase] = None, - autonomous_database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.CreateAutonomousDatabaseRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + autonomous_database: Optional[ + gco_autonomous_database.AutonomousDatabase + ] = None, + autonomous_database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new Autonomous Database in a given project and location. @@ -2580,10 +2878,14 @@ def sample_create_autonomous_database(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, autonomous_database, autonomous_database_id]) + has_flattened_params = any( + [parent, autonomous_database, autonomous_database_id] + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2600,14 +2902,14 @@ def sample_create_autonomous_database(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_autonomous_database] + rpc = self._transport._wrapped_methods[ + self._transport.create_autonomous_database + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2632,14 +2934,17 @@ def sample_create_autonomous_database(): # Done; return the response. 
return response - def delete_autonomous_database(self, - request: Optional[Union[oracledatabase.DeleteAutonomousDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.DeleteAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single Autonomous Database. .. code-block:: python @@ -2710,8 +3015,10 @@ def sample_delete_autonomous_database(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2724,14 +3031,14 @@ def sample_delete_autonomous_database(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_autonomous_database] + rpc = self._transport._wrapped_methods[ + self._transport.delete_autonomous_database + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2756,15 +3063,18 @@ def sample_delete_autonomous_database(): # Done; return the response. return response - def restore_autonomous_database(self, - request: Optional[Union[oracledatabase.RestoreAutonomousDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - restore_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def restore_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.RestoreAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + restore_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Restores a single Autonomous Database. .. code-block:: python @@ -2834,8 +3144,10 @@ def sample_restore_autonomous_database(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name, restore_time]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2850,14 +3162,14 @@ def sample_restore_autonomous_database(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.restore_autonomous_database] + rpc = self._transport._wrapped_methods[ + self._transport.restore_autonomous_database + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2882,17 +3194,20 @@ def sample_restore_autonomous_database(): # Done; return the response. return response - def generate_autonomous_database_wallet(self, - request: Optional[Union[oracledatabase.GenerateAutonomousDatabaseWalletRequest, dict]] = None, - *, - name: Optional[str] = None, - type_: Optional[autonomous_database.GenerateType] = None, - is_regional: Optional[bool] = None, - password: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + def generate_autonomous_database_wallet( + self, + request: Optional[ + Union[oracledatabase.GenerateAutonomousDatabaseWalletRequest, dict] + ] = None, + *, + name: Optional[str] = None, + type_: Optional[autonomous_database.GenerateType] = None, + is_regional: Optional[bool] = None, + password: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: r"""Generates a wallet for an Autonomous Database. .. code-block:: python @@ -2974,12 +3289,16 @@ def sample_generate_autonomous_database_wallet(): # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name, type_, is_regional, password]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, oracledatabase.GenerateAutonomousDatabaseWalletRequest): + if not isinstance( + request, oracledatabase.GenerateAutonomousDatabaseWalletRequest + ): request = oracledatabase.GenerateAutonomousDatabaseWalletRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2994,14 +3313,14 @@ def sample_generate_autonomous_database_wallet(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.generate_autonomous_database_wallet] + rpc = self._transport._wrapped_methods[ + self._transport.generate_autonomous_database_wallet + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3018,14 +3337,17 @@ def sample_generate_autonomous_database_wallet(): # Done; return the response. 
return response - def list_autonomous_db_versions(self, - request: Optional[Union[oracledatabase.ListAutonomousDbVersionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutonomousDbVersionsPager: + def list_autonomous_db_versions( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDbVersionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDbVersionsPager: r"""Lists all the available Autonomous Database versions for a project and location. @@ -3087,8 +3409,10 @@ def sample_list_autonomous_db_versions(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3101,14 +3425,14 @@ def sample_list_autonomous_db_versions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_autonomous_db_versions] + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_db_versions + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3136,14 +3460,17 @@ def sample_list_autonomous_db_versions(): # Done; return the response. return response - def list_autonomous_database_character_sets(self, - request: Optional[Union[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutonomousDatabaseCharacterSetsPager: + def list_autonomous_database_character_sets( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseCharacterSetsPager: r"""Lists Autonomous Database Character Sets in a given project and location. @@ -3205,12 +3532,16 @@ def sample_list_autonomous_database_character_sets(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + if not isinstance( + request, oracledatabase.ListAutonomousDatabaseCharacterSetsRequest + ): request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3219,14 +3550,14 @@ def sample_list_autonomous_database_character_sets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_autonomous_database_character_sets] + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_database_character_sets + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3254,14 +3585,17 @@ def sample_list_autonomous_database_character_sets(): # Done; return the response. 
return response - def list_autonomous_database_backups(self, - request: Optional[Union[oracledatabase.ListAutonomousDatabaseBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutonomousDatabaseBackupsPager: + def list_autonomous_database_backups( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabaseBackupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseBackupsPager: r"""Lists the long-term and automatic backups of an Autonomous Database. @@ -3323,8 +3657,10 @@ def sample_list_autonomous_database_backups(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3337,14 +3673,14 @@ def sample_list_autonomous_database_backups(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_autonomous_database_backups] + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_database_backups + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3425,8 +3761,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3434,7 +3769,11 @@ def list_operations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3479,8 +3818,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3488,7 +3826,11 @@ def get_operation( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3537,15 +3879,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. 
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -3590,15 +3936,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def get_location( self, @@ -3640,8 +3990,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3649,7 +3998,11 @@ def get_location( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3694,8 +4047,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3703,15 +4055,19 @@ def list_locations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) -__all__ = ( - "OracleDatabaseClient", -) +__all__ = ("OracleDatabaseClient",) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py similarity index 76% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py index 8e1634d718bd..111ec9a9d392 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py @@ -13,29 +13,45 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database_character_set -from google.cloud.oracledatabase_v1.types import autonomous_db_backup -from google.cloud.oracledatabase_v1.types import autonomous_db_version -from google.cloud.oracledatabase_v1.types import db_node -from google.cloud.oracledatabase_v1.types import db_server -from google.cloud.oracledatabase_v1.types import db_system_shape -from google.cloud.oracledatabase_v1.types import entitlement -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import gi_version -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) class ListCloudExadataInfrastructuresPager: @@ -55,14 +71,17 @@ class 
ListCloudExadataInfrastructuresPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListCloudExadataInfrastructuresResponse], - request: oracledatabase.ListCloudExadataInfrastructuresRequest, - response: oracledatabase.ListCloudExadataInfrastructuresResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListCloudExadataInfrastructuresResponse], + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + response: oracledatabase.ListCloudExadataInfrastructuresResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -93,7 +112,12 @@ def pages(self) -> Iterator[oracledatabase.ListCloudExadataInfrastructuresRespon yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[exadata_infra.CloudExadataInfrastructure]: @@ -101,7 +125,7 @@ def __iter__(self) -> Iterator[exadata_infra.CloudExadataInfrastructure]: yield from page.cloud_exadata_infrastructures def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListCloudVmClustersPager: @@ -121,14 +145,17 @@ class ListCloudVmClustersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListCloudVmClustersResponse], - request: oracledatabase.ListCloudVmClustersRequest, - response: oracledatabase.ListCloudVmClustersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListCloudVmClustersResponse], + request: oracledatabase.ListCloudVmClustersRequest, + response: oracledatabase.ListCloudVmClustersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -159,7 +186,12 @@ def pages(self) -> Iterator[oracledatabase.ListCloudVmClustersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[vm_cluster.CloudVmCluster]: @@ -167,7 +199,7 @@ def __iter__(self) -> Iterator[vm_cluster.CloudVmCluster]: yield from page.cloud_vm_clusters def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListEntitlementsPager: @@ -187,14 +219,17 @@ class ListEntitlementsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListEntitlementsResponse], - request: oracledatabase.ListEntitlementsRequest, - response: oracledatabase.ListEntitlementsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListEntitlementsResponse], + request: oracledatabase.ListEntitlementsRequest, + response: oracledatabase.ListEntitlementsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -225,7 +260,12 @@ def pages(self) -> Iterator[oracledatabase.ListEntitlementsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[entitlement.Entitlement]: @@ -233,7 +273,7 @@ def __iter__(self) -> Iterator[entitlement.Entitlement]: yield from page.entitlements def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListDbServersPager: @@ -253,14 +293,17 @@ class ListDbServersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListDbServersResponse], - request: oracledatabase.ListDbServersRequest, - response: oracledatabase.ListDbServersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbServersResponse], + request: oracledatabase.ListDbServersRequest, + response: oracledatabase.ListDbServersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -291,7 +334,12 @@ def pages(self) -> Iterator[oracledatabase.ListDbServersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[db_server.DbServer]: @@ -299,7 +347,7 @@ def __iter__(self) -> Iterator[db_server.DbServer]: yield from page.db_servers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListDbNodesPager: @@ -319,14 +367,17 @@ class ListDbNodesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListDbNodesResponse], - request: oracledatabase.ListDbNodesRequest, - response: oracledatabase.ListDbNodesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbNodesResponse], + request: oracledatabase.ListDbNodesRequest, + response: oracledatabase.ListDbNodesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -357,7 +408,12 @@ def pages(self) -> Iterator[oracledatabase.ListDbNodesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[db_node.DbNode]: @@ -365,7 +421,7 @@ def __iter__(self) -> Iterator[db_node.DbNode]: yield from page.db_nodes def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListGiVersionsPager: @@ -385,14 +441,17 @@ class ListGiVersionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListGiVersionsResponse], - request: oracledatabase.ListGiVersionsRequest, - response: oracledatabase.ListGiVersionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListGiVersionsResponse], + request: oracledatabase.ListGiVersionsRequest, + response: oracledatabase.ListGiVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -423,7 +482,12 @@ def pages(self) -> Iterator[oracledatabase.ListGiVersionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[gi_version.GiVersion]: @@ -431,7 +495,7 @@ def __iter__(self) -> Iterator[gi_version.GiVersion]: yield from page.gi_versions def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListDbSystemShapesPager: @@ -451,14 +515,17 @@ class ListDbSystemShapesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListDbSystemShapesResponse], - request: oracledatabase.ListDbSystemShapesRequest, - response: oracledatabase.ListDbSystemShapesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbSystemShapesResponse], + request: oracledatabase.ListDbSystemShapesRequest, + response: oracledatabase.ListDbSystemShapesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -489,7 +556,12 @@ def pages(self) -> Iterator[oracledatabase.ListDbSystemShapesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[db_system_shape.DbSystemShape]: @@ -497,7 +569,7 @@ def __iter__(self) -> Iterator[db_system_shape.DbSystemShape]: yield from page.db_system_shapes def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAutonomousDatabasesPager: @@ -517,14 +589,17 @@ class ListAutonomousDatabasesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListAutonomousDatabasesResponse], - request: oracledatabase.ListAutonomousDatabasesRequest, - response: oracledatabase.ListAutonomousDatabasesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDatabasesResponse], + request: oracledatabase.ListAutonomousDatabasesRequest, + response: oracledatabase.ListAutonomousDatabasesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -555,7 +630,12 @@ def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabasesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[autonomous_database.AutonomousDatabase]: @@ -563,7 +643,7 @@ def __iter__(self) -> Iterator[autonomous_database.AutonomousDatabase]: yield from page.autonomous_databases def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAutonomousDbVersionsPager: @@ -583,14 +663,17 @@ class ListAutonomousDbVersionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListAutonomousDbVersionsResponse], - request: oracledatabase.ListAutonomousDbVersionsRequest, - response: oracledatabase.ListAutonomousDbVersionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDbVersionsResponse], + request: oracledatabase.ListAutonomousDbVersionsRequest, + response: oracledatabase.ListAutonomousDbVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -621,7 +704,12 @@ def pages(self) -> Iterator[oracledatabase.ListAutonomousDbVersionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[autonomous_db_version.AutonomousDbVersion]: @@ -629,7 +717,7 @@ def __iter__(self) -> Iterator[autonomous_db_version.AutonomousDbVersion]: yield from page.autonomous_db_versions def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAutonomousDatabaseCharacterSetsPager: @@ -649,14 +737,19 @@ class ListAutonomousDatabaseCharacterSetsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListAutonomousDatabaseCharacterSetsResponse], - request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, - response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[ + ..., oracledatabase.ListAutonomousDatabaseCharacterSetsResponse + ], + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -673,7 +766,9 @@ def __init__(self, sent along with the request as metadata. """ self._method = method - self._request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(request) + self._request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest( + request + ) self._response = response self._retry = retry self._timeout = timeout @@ -683,19 +778,28 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: + def pages( + self, + ) -> Iterator[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __iter__(self) -> Iterator[autonomous_database_character_set.AutonomousDatabaseCharacterSet]: + def __iter__( + self, + ) -> Iterator[autonomous_database_character_set.AutonomousDatabaseCharacterSet]: for page in self.pages: yield from page.autonomous_database_character_sets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAutonomousDatabaseBackupsPager: @@ -715,14 +819,17 @@ class ListAutonomousDatabaseBackupsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., oracledatabase.ListAutonomousDatabaseBackupsResponse], - request: oracledatabase.ListAutonomousDatabaseBackupsRequest, - response: oracledatabase.ListAutonomousDatabaseBackupsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDatabaseBackupsResponse], + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + response: oracledatabase.ListAutonomousDatabaseBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. Args: @@ -753,7 +860,12 @@ def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabaseBackupsResponse yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[autonomous_db_backup.AutonomousDatabaseBackup]: @@ -761,4 +873,4 @@ def __iter__(self) -> Iterator[autonomous_db_backup.AutonomousDatabaseBackup]: yield from page.autonomous_database_backups def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py similarity index 76% rename from 
owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py index a6e6aeba08ab..91a06d71780e 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py @@ -17,16 +17,14 @@ from typing import Dict, Type from .base import OracleDatabaseTransport -from .rest import OracleDatabaseRestTransport -from .rest import OracleDatabaseRestInterceptor - +from .rest import OracleDatabaseRestInterceptor, OracleDatabaseRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[OracleDatabaseTransport]] -_transport_registry['rest'] = OracleDatabaseRestTransport +_transport_registry["rest"] = OracleDatabaseRestTransport __all__ = ( - 'OracleDatabaseTransport', - 'OracleDatabaseRestTransport', - 'OracleDatabaseRestInterceptor', + "OracleDatabaseTransport", + "OracleDatabaseRestTransport", + "OracleDatabaseRestInterceptor", ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py similarity index 68% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py index 72af8abc4db0..ced22db4e8d6 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py +++ 
b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py @@ -16,47 +16,49 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.oracledatabase_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster -from google.longrunning import operations_pb2 # type: ignore +from google.cloud.oracledatabase_v1 import gapic_version as package_version +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + exadata_infra, + oracledatabase, + vm_cluster, +) -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class OracleDatabaseTransport(abc.ABC): """Abstract transport class for OracleDatabase.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = 
"oracledatabase.googleapis.com" - DEFAULT_HOST: str = 'oracledatabase.googleapis.com' def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -92,30 +94,38 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -403,14 +413,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -420,201 +430,243 @@ def operations_client(self): raise NotImplementedError() @property - def list_cloud_exadata_infrastructures(self) -> Callable[ - [oracledatabase.ListCloudExadataInfrastructuresRequest], - Union[ - oracledatabase.ListCloudExadataInfrastructuresResponse, - Awaitable[oracledatabase.ListCloudExadataInfrastructuresResponse] - ]]: + def list_cloud_exadata_infrastructures( + self, + ) -> Callable[ + [oracledatabase.ListCloudExadataInfrastructuresRequest], + Union[ + oracledatabase.ListCloudExadataInfrastructuresResponse, + Awaitable[oracledatabase.ListCloudExadataInfrastructuresResponse], + ], + ]: raise NotImplementedError() @property - def get_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.GetCloudExadataInfrastructureRequest], - Union[ - exadata_infra.CloudExadataInfrastructure, - Awaitable[exadata_infra.CloudExadataInfrastructure] - ]]: + def get_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.GetCloudExadataInfrastructureRequest], + Union[ + exadata_infra.CloudExadataInfrastructure, + Awaitable[exadata_infra.CloudExadataInfrastructure], + ], + ]: raise NotImplementedError() @property - def create_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.CreateCloudExadataInfrastructureRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.CreateCloudExadataInfrastructureRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.DeleteCloudExadataInfrastructureRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudExadataInfrastructureRequest], + 
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def list_cloud_vm_clusters(self) -> Callable[ - [oracledatabase.ListCloudVmClustersRequest], - Union[ - oracledatabase.ListCloudVmClustersResponse, - Awaitable[oracledatabase.ListCloudVmClustersResponse] - ]]: + def list_cloud_vm_clusters( + self, + ) -> Callable[ + [oracledatabase.ListCloudVmClustersRequest], + Union[ + oracledatabase.ListCloudVmClustersResponse, + Awaitable[oracledatabase.ListCloudVmClustersResponse], + ], + ]: raise NotImplementedError() @property - def get_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.GetCloudVmClusterRequest], - Union[ - vm_cluster.CloudVmCluster, - Awaitable[vm_cluster.CloudVmCluster] - ]]: + def get_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.GetCloudVmClusterRequest], + Union[vm_cluster.CloudVmCluster, Awaitable[vm_cluster.CloudVmCluster]], + ]: raise NotImplementedError() @property - def create_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.CreateCloudVmClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.CreateCloudVmClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.DeleteCloudVmClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudVmClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def list_entitlements(self) -> Callable[ - [oracledatabase.ListEntitlementsRequest], - Union[ - oracledatabase.ListEntitlementsResponse, - Awaitable[oracledatabase.ListEntitlementsResponse] - ]]: + def list_entitlements( + 
self, + ) -> Callable[ + [oracledatabase.ListEntitlementsRequest], + Union[ + oracledatabase.ListEntitlementsResponse, + Awaitable[oracledatabase.ListEntitlementsResponse], + ], + ]: raise NotImplementedError() @property - def list_db_servers(self) -> Callable[ - [oracledatabase.ListDbServersRequest], - Union[ - oracledatabase.ListDbServersResponse, - Awaitable[oracledatabase.ListDbServersResponse] - ]]: + def list_db_servers( + self, + ) -> Callable[ + [oracledatabase.ListDbServersRequest], + Union[ + oracledatabase.ListDbServersResponse, + Awaitable[oracledatabase.ListDbServersResponse], + ], + ]: raise NotImplementedError() @property - def list_db_nodes(self) -> Callable[ - [oracledatabase.ListDbNodesRequest], - Union[ - oracledatabase.ListDbNodesResponse, - Awaitable[oracledatabase.ListDbNodesResponse] - ]]: + def list_db_nodes( + self, + ) -> Callable[ + [oracledatabase.ListDbNodesRequest], + Union[ + oracledatabase.ListDbNodesResponse, + Awaitable[oracledatabase.ListDbNodesResponse], + ], + ]: raise NotImplementedError() @property - def list_gi_versions(self) -> Callable[ - [oracledatabase.ListGiVersionsRequest], - Union[ - oracledatabase.ListGiVersionsResponse, - Awaitable[oracledatabase.ListGiVersionsResponse] - ]]: + def list_gi_versions( + self, + ) -> Callable[ + [oracledatabase.ListGiVersionsRequest], + Union[ + oracledatabase.ListGiVersionsResponse, + Awaitable[oracledatabase.ListGiVersionsResponse], + ], + ]: raise NotImplementedError() @property - def list_db_system_shapes(self) -> Callable[ - [oracledatabase.ListDbSystemShapesRequest], - Union[ - oracledatabase.ListDbSystemShapesResponse, - Awaitable[oracledatabase.ListDbSystemShapesResponse] - ]]: + def list_db_system_shapes( + self, + ) -> Callable[ + [oracledatabase.ListDbSystemShapesRequest], + Union[ + oracledatabase.ListDbSystemShapesResponse, + Awaitable[oracledatabase.ListDbSystemShapesResponse], + ], + ]: raise NotImplementedError() @property - def list_autonomous_databases(self) -> 
Callable[ - [oracledatabase.ListAutonomousDatabasesRequest], - Union[ - oracledatabase.ListAutonomousDatabasesResponse, - Awaitable[oracledatabase.ListAutonomousDatabasesResponse] - ]]: + def list_autonomous_databases( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabasesRequest], + Union[ + oracledatabase.ListAutonomousDatabasesResponse, + Awaitable[oracledatabase.ListAutonomousDatabasesResponse], + ], + ]: raise NotImplementedError() @property - def get_autonomous_database(self) -> Callable[ - [oracledatabase.GetAutonomousDatabaseRequest], - Union[ - autonomous_database.AutonomousDatabase, - Awaitable[autonomous_database.AutonomousDatabase] - ]]: + def get_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.GetAutonomousDatabaseRequest], + Union[ + autonomous_database.AutonomousDatabase, + Awaitable[autonomous_database.AutonomousDatabase], + ], + ]: raise NotImplementedError() @property - def create_autonomous_database(self) -> Callable[ - [oracledatabase.CreateAutonomousDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.CreateAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_autonomous_database(self) -> Callable[ - [oracledatabase.DeleteAutonomousDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.DeleteAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def restore_autonomous_database(self) -> Callable[ - [oracledatabase.RestoreAutonomousDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def restore_autonomous_database( + self, + ) -> Callable[ + 
[oracledatabase.RestoreAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def generate_autonomous_database_wallet(self) -> Callable[ - [oracledatabase.GenerateAutonomousDatabaseWalletRequest], - Union[ - oracledatabase.GenerateAutonomousDatabaseWalletResponse, - Awaitable[oracledatabase.GenerateAutonomousDatabaseWalletResponse] - ]]: + def generate_autonomous_database_wallet( + self, + ) -> Callable[ + [oracledatabase.GenerateAutonomousDatabaseWalletRequest], + Union[ + oracledatabase.GenerateAutonomousDatabaseWalletResponse, + Awaitable[oracledatabase.GenerateAutonomousDatabaseWalletResponse], + ], + ]: raise NotImplementedError() @property - def list_autonomous_db_versions(self) -> Callable[ - [oracledatabase.ListAutonomousDbVersionsRequest], - Union[ - oracledatabase.ListAutonomousDbVersionsResponse, - Awaitable[oracledatabase.ListAutonomousDbVersionsResponse] - ]]: + def list_autonomous_db_versions( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDbVersionsRequest], + Union[ + oracledatabase.ListAutonomousDbVersionsResponse, + Awaitable[oracledatabase.ListAutonomousDbVersionsResponse], + ], + ]: raise NotImplementedError() @property - def list_autonomous_database_character_sets(self) -> Callable[ - [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], - Union[ - oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, - Awaitable[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse] - ]]: + def list_autonomous_database_character_sets( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], + Union[ + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + Awaitable[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse], + ], + ]: raise NotImplementedError() @property - def list_autonomous_database_backups(self) -> Callable[ - [oracledatabase.ListAutonomousDatabaseBackupsRequest], - Union[ - 
oracledatabase.ListAutonomousDatabaseBackupsResponse, - Awaitable[oracledatabase.ListAutonomousDatabaseBackupsResponse] - ]]: + def list_autonomous_database_backups( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseBackupsRequest], + Union[ + oracledatabase.ListAutonomousDatabaseBackupsResponse, + Awaitable[oracledatabase.ListAutonomousDatabaseBackupsResponse], + ], + ]: raise NotImplementedError() @property @@ -622,7 +674,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -638,23 +693,18 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: raise NotImplementedError() @property - def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -662,10 +712,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -674,6 +728,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'OracleDatabaseTransport', -) +__all__ = ("OracleDatabaseTransport",) diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py similarity index 60% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py index 3b4232729794..ad8d2e4a9c29 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py @@ -14,26 +14,28 @@ # limitations under the License. # -from google.auth.transport.requests import AuthorizedSession # type: ignore +import dataclasses import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import json_format -from google.api_core import 
operations_v1 -from google.cloud.location import locations_pb2 # type: ignore +import grpc # type: ignore from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -41,14 +43,17 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster from google.longrunning import operations_pb2 # type: ignore -from .base import OracleDatabaseTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + exadata_infra, + oracledatabase, + vm_cluster, +) +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import OracleDatabaseTransport DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -253,7 +258,14 @@ def post_restore_autonomous_database(self, response): """ - def pre_create_autonomous_database(self, request: oracledatabase.CreateAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.CreateAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + + def pre_create_autonomous_database( + self, + request: oracledatabase.CreateAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.CreateAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for create_autonomous_database Override in a subclass to manipulate the request or metadata @@ -261,7 +273,9 @@ def pre_create_autonomous_database(self, request: oracledatabase.CreateAutonomou """ return request, metadata 
- def post_create_autonomous_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_autonomous_database Override in a subclass to manipulate the response @@ -269,7 +283,15 @@ def post_create_autonomous_database(self, response: operations_pb2.Operation) -> it is returned to user code. """ return response - def pre_create_cloud_exadata_infrastructure(self, request: oracledatabase.CreateCloudExadataInfrastructureRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.CreateCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]]]: + + def pre_create_cloud_exadata_infrastructure( + self, + request: oracledatabase.CreateCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.CreateCloudExadataInfrastructureRequest, + Sequence[Tuple[str, str]], + ]: """Pre-rpc interceptor for create_cloud_exadata_infrastructure Override in a subclass to manipulate the request or metadata @@ -277,7 +299,9 @@ def pre_create_cloud_exadata_infrastructure(self, request: oracledatabase.Create """ return request, metadata - def post_create_cloud_exadata_infrastructure(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_cloud_exadata_infrastructure( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cloud_exadata_infrastructure Override in a subclass to manipulate the response @@ -285,7 +309,12 @@ def post_create_cloud_exadata_infrastructure(self, response: operations_pb2.Oper it is returned to user code. 
""" return response - def pre_create_cloud_vm_cluster(self, request: oracledatabase.CreateCloudVmClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.CreateCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + + def pre_create_cloud_vm_cluster( + self, + request: oracledatabase.CreateCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.CreateCloudVmClusterRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_cloud_vm_cluster Override in a subclass to manipulate the request or metadata @@ -293,7 +322,9 @@ def pre_create_cloud_vm_cluster(self, request: oracledatabase.CreateCloudVmClust """ return request, metadata - def post_create_cloud_vm_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_cloud_vm_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cloud_vm_cluster Override in a subclass to manipulate the response @@ -301,7 +332,14 @@ def post_create_cloud_vm_cluster(self, response: operations_pb2.Operation) -> op it is returned to user code. 
""" return response - def pre_delete_autonomous_database(self, request: oracledatabase.DeleteAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.DeleteAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + + def pre_delete_autonomous_database( + self, + request: oracledatabase.DeleteAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.DeleteAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for delete_autonomous_database Override in a subclass to manipulate the request or metadata @@ -309,7 +347,9 @@ def pre_delete_autonomous_database(self, request: oracledatabase.DeleteAutonomou """ return request, metadata - def post_delete_autonomous_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_autonomous_database Override in a subclass to manipulate the response @@ -317,7 +357,15 @@ def post_delete_autonomous_database(self, response: operations_pb2.Operation) -> it is returned to user code. 
""" return response - def pre_delete_cloud_exadata_infrastructure(self, request: oracledatabase.DeleteCloudExadataInfrastructureRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.DeleteCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]]]: + + def pre_delete_cloud_exadata_infrastructure( + self, + request: oracledatabase.DeleteCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.DeleteCloudExadataInfrastructureRequest, + Sequence[Tuple[str, str]], + ]: """Pre-rpc interceptor for delete_cloud_exadata_infrastructure Override in a subclass to manipulate the request or metadata @@ -325,7 +373,9 @@ def pre_delete_cloud_exadata_infrastructure(self, request: oracledatabase.Delete """ return request, metadata - def post_delete_cloud_exadata_infrastructure(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_cloud_exadata_infrastructure( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cloud_exadata_infrastructure Override in a subclass to manipulate the response @@ -333,7 +383,12 @@ def post_delete_cloud_exadata_infrastructure(self, response: operations_pb2.Oper it is returned to user code. 
""" return response - def pre_delete_cloud_vm_cluster(self, request: oracledatabase.DeleteCloudVmClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.DeleteCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + + def pre_delete_cloud_vm_cluster( + self, + request: oracledatabase.DeleteCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.DeleteCloudVmClusterRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_cloud_vm_cluster Override in a subclass to manipulate the request or metadata @@ -341,7 +396,9 @@ def pre_delete_cloud_vm_cluster(self, request: oracledatabase.DeleteCloudVmClust """ return request, metadata - def post_delete_cloud_vm_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_cloud_vm_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cloud_vm_cluster Override in a subclass to manipulate the response @@ -349,7 +406,15 @@ def post_delete_cloud_vm_cluster(self, response: operations_pb2.Operation) -> op it is returned to user code. 
""" return response - def pre_generate_autonomous_database_wallet(self, request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GenerateAutonomousDatabaseWalletRequest, Sequence[Tuple[str, str]]]: + + def pre_generate_autonomous_database_wallet( + self, + request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.GenerateAutonomousDatabaseWalletRequest, + Sequence[Tuple[str, str]], + ]: """Pre-rpc interceptor for generate_autonomous_database_wallet Override in a subclass to manipulate the request or metadata @@ -357,7 +422,9 @@ def pre_generate_autonomous_database_wallet(self, request: oracledatabase.Genera """ return request, metadata - def post_generate_autonomous_database_wallet(self, response: oracledatabase.GenerateAutonomousDatabaseWalletResponse) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + def post_generate_autonomous_database_wallet( + self, response: oracledatabase.GenerateAutonomousDatabaseWalletResponse + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: """Post-rpc interceptor for generate_autonomous_database_wallet Override in a subclass to manipulate the response @@ -365,7 +432,12 @@ def post_generate_autonomous_database_wallet(self, response: oracledatabase.Gene it is returned to user code. 
""" return response - def pre_get_autonomous_database(self, request: oracledatabase.GetAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GetAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + + def pre_get_autonomous_database( + self, + request: oracledatabase.GetAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.GetAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_autonomous_database Override in a subclass to manipulate the request or metadata @@ -373,7 +445,9 @@ def pre_get_autonomous_database(self, request: oracledatabase.GetAutonomousDatab """ return request, metadata - def post_get_autonomous_database(self, response: autonomous_database.AutonomousDatabase) -> autonomous_database.AutonomousDatabase: + def post_get_autonomous_database( + self, response: autonomous_database.AutonomousDatabase + ) -> autonomous_database.AutonomousDatabase: """Post-rpc interceptor for get_autonomous_database Override in a subclass to manipulate the response @@ -381,7 +455,14 @@ def post_get_autonomous_database(self, response: autonomous_database.AutonomousD it is returned to user code. 
""" return response - def pre_get_cloud_exadata_infrastructure(self, request: oracledatabase.GetCloudExadataInfrastructureRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GetCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]]]: + + def pre_get_cloud_exadata_infrastructure( + self, + request: oracledatabase.GetCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.GetCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for get_cloud_exadata_infrastructure Override in a subclass to manipulate the request or metadata @@ -389,7 +470,9 @@ def pre_get_cloud_exadata_infrastructure(self, request: oracledatabase.GetCloudE """ return request, metadata - def post_get_cloud_exadata_infrastructure(self, response: exadata_infra.CloudExadataInfrastructure) -> exadata_infra.CloudExadataInfrastructure: + def post_get_cloud_exadata_infrastructure( + self, response: exadata_infra.CloudExadataInfrastructure + ) -> exadata_infra.CloudExadataInfrastructure: """Post-rpc interceptor for get_cloud_exadata_infrastructure Override in a subclass to manipulate the response @@ -397,7 +480,12 @@ def post_get_cloud_exadata_infrastructure(self, response: exadata_infra.CloudExa it is returned to user code. 
""" return response - def pre_get_cloud_vm_cluster(self, request: oracledatabase.GetCloudVmClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GetCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + + def pre_get_cloud_vm_cluster( + self, + request: oracledatabase.GetCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.GetCloudVmClusterRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_cloud_vm_cluster Override in a subclass to manipulate the request or metadata @@ -405,7 +493,9 @@ def pre_get_cloud_vm_cluster(self, request: oracledatabase.GetCloudVmClusterRequ """ return request, metadata - def post_get_cloud_vm_cluster(self, response: vm_cluster.CloudVmCluster) -> vm_cluster.CloudVmCluster: + def post_get_cloud_vm_cluster( + self, response: vm_cluster.CloudVmCluster + ) -> vm_cluster.CloudVmCluster: """Post-rpc interceptor for get_cloud_vm_cluster Override in a subclass to manipulate the response @@ -413,7 +503,14 @@ def post_get_cloud_vm_cluster(self, response: vm_cluster.CloudVmCluster) -> vm_c it is returned to user code. 
""" return response - def pre_list_autonomous_database_backups(self, request: oracledatabase.ListAutonomousDatabaseBackupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDatabaseBackupsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_autonomous_database_backups( + self, + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabaseBackupsRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for list_autonomous_database_backups Override in a subclass to manipulate the request or metadata @@ -421,7 +518,9 @@ def pre_list_autonomous_database_backups(self, request: oracledatabase.ListAuton """ return request, metadata - def post_list_autonomous_database_backups(self, response: oracledatabase.ListAutonomousDatabaseBackupsResponse) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + def post_list_autonomous_database_backups( + self, response: oracledatabase.ListAutonomousDatabaseBackupsResponse + ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: """Post-rpc interceptor for list_autonomous_database_backups Override in a subclass to manipulate the response @@ -429,7 +528,15 @@ def post_list_autonomous_database_backups(self, response: oracledatabase.ListAut it is returned to user code. 
""" return response - def pre_list_autonomous_database_character_sets(self, request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_autonomous_database_character_sets( + self, + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + Sequence[Tuple[str, str]], + ]: """Pre-rpc interceptor for list_autonomous_database_character_sets Override in a subclass to manipulate the request or metadata @@ -437,7 +544,9 @@ def pre_list_autonomous_database_character_sets(self, request: oracledatabase.Li """ return request, metadata - def post_list_autonomous_database_character_sets(self, response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + def post_list_autonomous_database_character_sets( + self, response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse + ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: """Post-rpc interceptor for list_autonomous_database_character_sets Override in a subclass to manipulate the response @@ -445,7 +554,14 @@ def post_list_autonomous_database_character_sets(self, response: oracledatabase. it is returned to user code. 
""" return response - def pre_list_autonomous_databases(self, request: oracledatabase.ListAutonomousDatabasesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDatabasesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_autonomous_databases( + self, + request: oracledatabase.ListAutonomousDatabasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabasesRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for list_autonomous_databases Override in a subclass to manipulate the request or metadata @@ -453,7 +569,9 @@ def pre_list_autonomous_databases(self, request: oracledatabase.ListAutonomousDa """ return request, metadata - def post_list_autonomous_databases(self, response: oracledatabase.ListAutonomousDatabasesResponse) -> oracledatabase.ListAutonomousDatabasesResponse: + def post_list_autonomous_databases( + self, response: oracledatabase.ListAutonomousDatabasesResponse + ) -> oracledatabase.ListAutonomousDatabasesResponse: """Post-rpc interceptor for list_autonomous_databases Override in a subclass to manipulate the response @@ -461,7 +579,14 @@ def post_list_autonomous_databases(self, response: oracledatabase.ListAutonomous it is returned to user code. 
""" return response - def pre_list_autonomous_db_versions(self, request: oracledatabase.ListAutonomousDbVersionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDbVersionsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_autonomous_db_versions( + self, + request: oracledatabase.ListAutonomousDbVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDbVersionsRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for list_autonomous_db_versions Override in a subclass to manipulate the request or metadata @@ -469,7 +594,9 @@ def pre_list_autonomous_db_versions(self, request: oracledatabase.ListAutonomous """ return request, metadata - def post_list_autonomous_db_versions(self, response: oracledatabase.ListAutonomousDbVersionsResponse) -> oracledatabase.ListAutonomousDbVersionsResponse: + def post_list_autonomous_db_versions( + self, response: oracledatabase.ListAutonomousDbVersionsResponse + ) -> oracledatabase.ListAutonomousDbVersionsResponse: """Post-rpc interceptor for list_autonomous_db_versions Override in a subclass to manipulate the response @@ -477,7 +604,14 @@ def post_list_autonomous_db_versions(self, response: oracledatabase.ListAutonomo it is returned to user code. 
""" return response - def pre_list_cloud_exadata_infrastructures(self, request: oracledatabase.ListCloudExadataInfrastructuresRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListCloudExadataInfrastructuresRequest, Sequence[Tuple[str, str]]]: + + def pre_list_cloud_exadata_infrastructures( + self, + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListCloudExadataInfrastructuresRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for list_cloud_exadata_infrastructures Override in a subclass to manipulate the request or metadata @@ -485,7 +619,9 @@ def pre_list_cloud_exadata_infrastructures(self, request: oracledatabase.ListClo """ return request, metadata - def post_list_cloud_exadata_infrastructures(self, response: oracledatabase.ListCloudExadataInfrastructuresResponse) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + def post_list_cloud_exadata_infrastructures( + self, response: oracledatabase.ListCloudExadataInfrastructuresResponse + ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: """Post-rpc interceptor for list_cloud_exadata_infrastructures Override in a subclass to manipulate the response @@ -493,7 +629,12 @@ def post_list_cloud_exadata_infrastructures(self, response: oracledatabase.ListC it is returned to user code. 
""" return response - def pre_list_cloud_vm_clusters(self, request: oracledatabase.ListCloudVmClustersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListCloudVmClustersRequest, Sequence[Tuple[str, str]]]: + + def pre_list_cloud_vm_clusters( + self, + request: oracledatabase.ListCloudVmClustersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListCloudVmClustersRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_cloud_vm_clusters Override in a subclass to manipulate the request or metadata @@ -501,7 +642,9 @@ def pre_list_cloud_vm_clusters(self, request: oracledatabase.ListCloudVmClusters """ return request, metadata - def post_list_cloud_vm_clusters(self, response: oracledatabase.ListCloudVmClustersResponse) -> oracledatabase.ListCloudVmClustersResponse: + def post_list_cloud_vm_clusters( + self, response: oracledatabase.ListCloudVmClustersResponse + ) -> oracledatabase.ListCloudVmClustersResponse: """Post-rpc interceptor for list_cloud_vm_clusters Override in a subclass to manipulate the response @@ -509,7 +652,12 @@ def post_list_cloud_vm_clusters(self, response: oracledatabase.ListCloudVmCluste it is returned to user code. 
""" return response - def pre_list_db_nodes(self, request: oracledatabase.ListDbNodesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListDbNodesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_db_nodes( + self, + request: oracledatabase.ListDbNodesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbNodesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_db_nodes Override in a subclass to manipulate the request or metadata @@ -517,7 +665,9 @@ def pre_list_db_nodes(self, request: oracledatabase.ListDbNodesRequest, metadata """ return request, metadata - def post_list_db_nodes(self, response: oracledatabase.ListDbNodesResponse) -> oracledatabase.ListDbNodesResponse: + def post_list_db_nodes( + self, response: oracledatabase.ListDbNodesResponse + ) -> oracledatabase.ListDbNodesResponse: """Post-rpc interceptor for list_db_nodes Override in a subclass to manipulate the response @@ -525,7 +675,12 @@ def post_list_db_nodes(self, response: oracledatabase.ListDbNodesResponse) -> or it is returned to user code. 
""" return response - def pre_list_db_servers(self, request: oracledatabase.ListDbServersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListDbServersRequest, Sequence[Tuple[str, str]]]: + + def pre_list_db_servers( + self, + request: oracledatabase.ListDbServersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbServersRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_db_servers Override in a subclass to manipulate the request or metadata @@ -533,7 +688,9 @@ def pre_list_db_servers(self, request: oracledatabase.ListDbServersRequest, meta """ return request, metadata - def post_list_db_servers(self, response: oracledatabase.ListDbServersResponse) -> oracledatabase.ListDbServersResponse: + def post_list_db_servers( + self, response: oracledatabase.ListDbServersResponse + ) -> oracledatabase.ListDbServersResponse: """Post-rpc interceptor for list_db_servers Override in a subclass to manipulate the response @@ -541,7 +698,12 @@ def post_list_db_servers(self, response: oracledatabase.ListDbServersResponse) - it is returned to user code. 
""" return response - def pre_list_db_system_shapes(self, request: oracledatabase.ListDbSystemShapesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListDbSystemShapesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_db_system_shapes( + self, + request: oracledatabase.ListDbSystemShapesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbSystemShapesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_db_system_shapes Override in a subclass to manipulate the request or metadata @@ -549,7 +711,9 @@ def pre_list_db_system_shapes(self, request: oracledatabase.ListDbSystemShapesRe """ return request, metadata - def post_list_db_system_shapes(self, response: oracledatabase.ListDbSystemShapesResponse) -> oracledatabase.ListDbSystemShapesResponse: + def post_list_db_system_shapes( + self, response: oracledatabase.ListDbSystemShapesResponse + ) -> oracledatabase.ListDbSystemShapesResponse: """Post-rpc interceptor for list_db_system_shapes Override in a subclass to manipulate the response @@ -557,7 +721,12 @@ def post_list_db_system_shapes(self, response: oracledatabase.ListDbSystemShapes it is returned to user code. 
""" return response - def pre_list_entitlements(self, request: oracledatabase.ListEntitlementsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListEntitlementsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_entitlements( + self, + request: oracledatabase.ListEntitlementsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListEntitlementsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_entitlements Override in a subclass to manipulate the request or metadata @@ -565,7 +734,9 @@ def pre_list_entitlements(self, request: oracledatabase.ListEntitlementsRequest, """ return request, metadata - def post_list_entitlements(self, response: oracledatabase.ListEntitlementsResponse) -> oracledatabase.ListEntitlementsResponse: + def post_list_entitlements( + self, response: oracledatabase.ListEntitlementsResponse + ) -> oracledatabase.ListEntitlementsResponse: """Post-rpc interceptor for list_entitlements Override in a subclass to manipulate the response @@ -573,7 +744,12 @@ def post_list_entitlements(self, response: oracledatabase.ListEntitlementsRespon it is returned to user code. 
""" return response - def pre_list_gi_versions(self, request: oracledatabase.ListGiVersionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListGiVersionsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_gi_versions( + self, + request: oracledatabase.ListGiVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListGiVersionsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_gi_versions Override in a subclass to manipulate the request or metadata @@ -581,7 +757,9 @@ def pre_list_gi_versions(self, request: oracledatabase.ListGiVersionsRequest, me """ return request, metadata - def post_list_gi_versions(self, response: oracledatabase.ListGiVersionsResponse) -> oracledatabase.ListGiVersionsResponse: + def post_list_gi_versions( + self, response: oracledatabase.ListGiVersionsResponse + ) -> oracledatabase.ListGiVersionsResponse: """Post-rpc interceptor for list_gi_versions Override in a subclass to manipulate the response @@ -589,7 +767,14 @@ def post_list_gi_versions(self, response: oracledatabase.ListGiVersionsResponse) it is returned to user code. 
""" return response - def pre_restore_autonomous_database(self, request: oracledatabase.RestoreAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.RestoreAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + + def pre_restore_autonomous_database( + self, + request: oracledatabase.RestoreAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.RestoreAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for restore_autonomous_database Override in a subclass to manipulate the request or metadata @@ -597,7 +782,9 @@ def pre_restore_autonomous_database(self, request: oracledatabase.RestoreAutonom """ return request, metadata - def post_restore_autonomous_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_restore_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_autonomous_database Override in a subclass to manipulate the response @@ -607,7 +794,9 @@ def post_restore_autonomous_database(self, response: operations_pb2.Operation) - return response def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_location @@ -626,8 +815,11 @@ def post_get_location( it is returned to user code. """ return response + def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_locations @@ -646,8 +838,11 @@ def post_list_locations( it is returned to user code. 
""" return response + def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_operation @@ -656,9 +851,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -666,8 +859,11 @@ def post_cancel_operation( it is returned to user code. """ return response + def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation @@ -676,9 +872,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: None - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -686,8 +880,11 @@ def post_delete_operation( it is returned to user code. """ return response + def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation @@ -706,8 +903,11 @@ def post_get_operation( it is returned to user code. 
""" return response + def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations @@ -748,20 +948,21 @@ class OracleDatabaseRestTransport(OracleDatabaseTransport): """ - def __init__(self, *, - host: str = 'oracledatabase.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[OracleDatabaseRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "oracledatabase.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[OracleDatabaseRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -800,7 +1001,9 @@ def __init__(self, *, # credentials object maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -811,10 +1014,11 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -831,42 +1035,45 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. 
return self._operations_client @@ -875,77 +1082,88 @@ class _CreateAutonomousDatabase(OracleDatabaseRestStub): def __hash__(self): return hash("CreateAutonomousDatabase") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "autonomousDatabaseId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "autonomousDatabaseId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.CreateAutonomousDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create autonomous - database method over HTTP. - - Args: - request (~.oracledatabase.CreateAutonomousDatabaseRequest): - The request object. The request for ``AutonomousDatabase.Create``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + def __call__( + self, + request: oracledatabase.CreateAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.CreateAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Create``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabases', - 'body': 'autonomous_database', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabases", + "body": "autonomous_database", + }, ] - request, metadata = self._interceptor.pre_create_autonomous_database(request, metadata) + request, metadata = self._interceptor.pre_create_autonomous_database( + request, metadata + ) pb_request = oracledatabase.CreateAutonomousDatabaseRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( 
"{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -962,77 +1180,93 @@ class _CreateCloudExadataInfrastructure(OracleDatabaseRestStub): def __hash__(self): return hash("CreateCloudExadataInfrastructure") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "cloudExadataInfrastructureId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "cloudExadataInfrastructureId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.CreateCloudExadataInfrastructureRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create cloud exadata - infrastructure method over HTTP. - - Args: - request (~.oracledatabase.CreateCloudExadataInfrastructureRequest): - The request object. The request for ``CloudExadataInfrastructure.Create``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
+ def __call__( + self, + request: oracledatabase.CreateCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.CreateCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Create``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures', - 'body': 'cloud_exadata_infrastructure', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures", + "body": "cloud_exadata_infrastructure", + }, ] - request, metadata = self._interceptor.pre_create_cloud_exadata_infrastructure(request, metadata) - pb_request = oracledatabase.CreateCloudExadataInfrastructureRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_create_cloud_exadata_infrastructure( + request, metadata + ) + pb_request = oracledatabase.CreateCloudExadataInfrastructureRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = 
transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1049,19 +1283,26 @@ class _CreateCloudVmCluster(OracleDatabaseRestStub): def __hash__(self): return hash("CreateCloudVmCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "cloudVmClusterId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "cloudVmClusterId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.CreateCloudVmClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.CreateCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the create 
cloud vm cluster method over HTTP. Args: @@ -1081,44 +1322,48 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/cloudVmClusters', - 'body': 'cloud_vm_cluster', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/cloudVmClusters", + "body": "cloud_vm_cluster", + }, ] - request, metadata = self._interceptor.pre_create_cloud_vm_cluster(request, metadata) + request, metadata = self._interceptor.pre_create_cloud_vm_cluster( + request, metadata + ) pb_request = oracledatabase.CreateCloudVmClusterRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1135,69 +1380,79 @@ class _DeleteAutonomousDatabase(OracleDatabaseRestStub): def __hash__(self): return hash("DeleteAutonomousDatabase") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.DeleteAutonomousDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete autonomous - database method over HTTP. - - Args: - request (~.oracledatabase.DeleteAutonomousDatabaseRequest): - The request object. The request for ``AutonomousDatabase.Delete``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + def __call__( + self, + request: oracledatabase.DeleteAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.DeleteAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}", + }, ] - request, metadata = self._interceptor.pre_delete_autonomous_database(request, metadata) + request, metadata = self._interceptor.pre_delete_autonomous_database( + request, metadata + ) pb_request = oracledatabase.DeleteAutonomousDatabaseRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1214,69 +1469,84 @@ class _DeleteCloudExadataInfrastructure(OracleDatabaseRestStub): def __hash__(self): return hash("DeleteCloudExadataInfrastructure") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.DeleteCloudExadataInfrastructureRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete cloud exadata - infrastructure method over HTTP. - - Args: - request (~.oracledatabase.DeleteCloudExadataInfrastructureRequest): - The request object. The request for ``CloudExadataInfrastructure.Delete``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + def __call__( + self, + request: oracledatabase.DeleteCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.DeleteCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}", + }, ] - request, metadata = self._interceptor.pre_delete_cloud_exadata_infrastructure(request, metadata) - pb_request = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_delete_cloud_exadata_infrastructure( + request, metadata + ) + pb_request = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception # subclass. @@ -1293,19 +1563,24 @@ class _DeleteCloudVmCluster(OracleDatabaseRestStub): def __hash__(self): return hash("DeleteCloudVmCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.DeleteCloudVmClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.DeleteCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the delete cloud vm cluster method over HTTP. 
Args: @@ -1325,36 +1600,41 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/cloudVmClusters/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/cloudVmClusters/*}", + }, ] - request, metadata = self._interceptor.pre_delete_cloud_vm_cluster(request, metadata) + request, metadata = self._interceptor.pre_delete_cloud_vm_cluster( + request, metadata + ) pb_request = oracledatabase.DeleteCloudVmClusterRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1371,74 +1651,88 @@ class _GenerateAutonomousDatabaseWallet(OracleDatabaseRestStub): def __hash__(self): return hash("GenerateAutonomousDatabaseWallet") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: - r"""Call the generate autonomous - database wallet method over HTTP. - - Args: - request (~.oracledatabase.GenerateAutonomousDatabaseWalletRequest): - The request object. The request for ``AutonomousDatabase.GenerateWallet``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.GenerateAutonomousDatabaseWalletResponse: - The response for ``AutonomousDatabase.GenerateWallet``. + def __call__( + self, + request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + r"""Call the generate autonomous + database wallet method over HTTP. + + Args: + request (~.oracledatabase.GenerateAutonomousDatabaseWalletRequest): + The request object. The request for ``AutonomousDatabase.GenerateWallet``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.GenerateAutonomousDatabaseWalletResponse: + The response for ``AutonomousDatabase.GenerateWallet``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_generate_autonomous_database_wallet(request, metadata) - pb_request = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_generate_autonomous_database_wallet( + request, metadata + ) + pb_request = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 
'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1457,19 +1751,24 @@ class _GetAutonomousDatabase(OracleDatabaseRestStub): def __hash__(self): return hash("GetAutonomousDatabase") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.GetAutonomousDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> autonomous_database.AutonomousDatabase: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GetAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autonomous_database.AutonomousDatabase: r"""Call the get autonomous database method over HTTP. 
Args: @@ -1489,36 +1788,41 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}", + }, ] - request, metadata = self._interceptor.pre_get_autonomous_database(request, metadata) + request, metadata = self._interceptor.pre_get_autonomous_database( + request, metadata + ) pb_request = oracledatabase.GetAutonomousDatabaseRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1537,69 +1841,79 @@ class _GetCloudExadataInfrastructure(OracleDatabaseRestStub): def __hash__(self): return hash("GetCloudExadataInfrastructure") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.GetCloudExadataInfrastructureRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> exadata_infra.CloudExadataInfrastructure: - r"""Call the get cloud exadata - infrastructure method over HTTP. - - Args: - request (~.oracledatabase.GetCloudExadataInfrastructureRequest): - The request object. The request for ``CloudExadataInfrastructure.Get``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.exadata_infra.CloudExadataInfrastructure: - Represents CloudExadataInfrastructure - resource. - https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + def __call__( + self, + request: oracledatabase.GetCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> exadata_infra.CloudExadataInfrastructure: + r"""Call the get cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.GetCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Get``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.exadata_infra.CloudExadataInfrastructure: + Represents CloudExadataInfrastructure + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}", + }, ] - request, metadata = self._interceptor.pre_get_cloud_exadata_infrastructure(request, metadata) + request, metadata = self._interceptor.pre_get_cloud_exadata_infrastructure( + request, metadata + ) pb_request = oracledatabase.GetCloudExadataInfrastructureRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case 
of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1618,19 +1932,24 @@ class _GetCloudVmCluster(OracleDatabaseRestStub): def __hash__(self): return hash("GetCloudVmCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.GetCloudVmClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> vm_cluster.CloudVmCluster: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GetCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vm_cluster.CloudVmCluster: r"""Call the get cloud vm cluster method over HTTP. 
Args: @@ -1650,36 +1969,41 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/cloudVmClusters/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/cloudVmClusters/*}", + }, ] - request, metadata = self._interceptor.pre_get_cloud_vm_cluster(request, metadata) + request, metadata = self._interceptor.pre_get_cloud_vm_cluster( + request, metadata + ) pb_request = oracledatabase.GetCloudVmClusterRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1698,66 +2022,76 @@ class _ListAutonomousDatabaseBackups(OracleDatabaseRestStub): def __hash__(self): return hash("ListAutonomousDatabaseBackups") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListAutonomousDatabaseBackupsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: - r"""Call the list autonomous database - backups method over HTTP. - - Args: - request (~.oracledatabase.ListAutonomousDatabaseBackupsRequest): - The request object. The request for ``AutonomousDatabaseBackup.List``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.ListAutonomousDatabaseBackupsResponse: - The response for ``AutonomousDatabaseBackup.List``. + def __call__( + self, + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + r"""Call the list autonomous database + backups method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseBackupsRequest): + The request object. The request for ``AutonomousDatabaseBackup.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseBackupsResponse: + The response for ``AutonomousDatabaseBackup.List``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups", + }, ] - request, metadata = self._interceptor.pre_list_autonomous_database_backups(request, metadata) + request, metadata = self._interceptor.pre_list_autonomous_database_backups( + request, metadata + ) pb_request = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1776,68 +2110,83 @@ class _ListAutonomousDatabaseCharacterSets(OracleDatabaseRestStub): def __hash__(self): return hash("ListAutonomousDatabaseCharacterSets") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: - r"""Call the list autonomous database - character sets method over HTTP. - - Args: - request (~.oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): - The request object. The request for ``AutonomousDatabaseCharacterSet.List``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: - The response for - ``AutonomousDatabaseCharacterSet.List``. + def __call__( + self, + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + r"""Call the list autonomous database + character sets method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + The request object. The request for ``AutonomousDatabaseCharacterSet.List``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + The response for + ``AutonomousDatabaseCharacterSet.List``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets", + }, ] - request, metadata = self._interceptor.pre_list_autonomous_database_character_sets(request, metadata) - pb_request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_list_autonomous_database_character_sets( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, 
params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1846,7 +2195,9 @@ def __call__(self, # Return the response resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() - pb_resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(resp) + pb_resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + resp + ) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_autonomous_database_character_sets(resp) @@ -1856,19 +2207,24 @@ class _ListAutonomousDatabases(OracleDatabaseRestStub): def __hash__(self): return hash("ListAutonomousDatabases") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListAutonomousDatabasesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListAutonomousDatabasesResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDatabasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabasesResponse: r"""Call the list autonomous databases method over HTTP. Args: @@ -1885,36 +2241,41 @@ def __call__(self, The response for ``AutonomousDatabase.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabases', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabases", + }, ] - request, metadata = self._interceptor.pre_list_autonomous_databases(request, metadata) + request, metadata = self._interceptor.pre_list_autonomous_databases( + request, metadata + ) pb_request = oracledatabase.ListAutonomousDatabasesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1933,66 +2294,76 @@ class _ListAutonomousDbVersions(OracleDatabaseRestStub): def __hash__(self): return hash("ListAutonomousDbVersions") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListAutonomousDbVersionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListAutonomousDbVersionsResponse: - r"""Call the list autonomous db - versions method over HTTP. - - Args: - request (~.oracledatabase.ListAutonomousDbVersionsRequest): - The request object. The request for ``AutonomousDbVersion.List``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.ListAutonomousDbVersionsResponse: - The response for ``AutonomousDbVersion.List``. + def __call__( + self, + request: oracledatabase.ListAutonomousDbVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDbVersionsResponse: + r"""Call the list autonomous db + versions method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDbVersionsRequest): + The request object. The request for ``AutonomousDbVersion.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDbVersionsResponse: + The response for ``AutonomousDbVersion.List``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDbVersions', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDbVersions", + }, ] - request, metadata = self._interceptor.pre_list_autonomous_db_versions(request, metadata) + request, metadata = self._interceptor.pre_list_autonomous_db_versions( + request, metadata + ) pb_request = oracledatabase.ListAutonomousDbVersionsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2011,66 +2382,81 @@ class _ListCloudExadataInfrastructures(OracleDatabaseRestStub): def __hash__(self): return hash("ListCloudExadataInfrastructures") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListCloudExadataInfrastructuresRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: - r"""Call the list cloud exadata - infrastructures method over HTTP. - - Args: - request (~.oracledatabase.ListCloudExadataInfrastructuresRequest): - The request object. The request for ``CloudExadataInfrastructures.List``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.ListCloudExadataInfrastructuresResponse: - The response for ``CloudExadataInfrastructures.list``. + def __call__( + self, + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + r"""Call the list cloud exadata + infrastructures method over HTTP. + + Args: + request (~.oracledatabase.ListCloudExadataInfrastructuresRequest): + The request object. The request for ``CloudExadataInfrastructures.List``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListCloudExadataInfrastructuresResponse: + The response for ``CloudExadataInfrastructures.list``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures", + }, ] - request, metadata = self._interceptor.pre_list_cloud_exadata_infrastructures(request, metadata) - pb_request = oracledatabase.ListCloudExadataInfrastructuresRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_list_cloud_exadata_infrastructures( + request, metadata + ) + pb_request = oracledatabase.ListCloudExadataInfrastructuresRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, 
strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2089,19 +2475,24 @@ class _ListCloudVmClusters(OracleDatabaseRestStub): def __hash__(self): return hash("ListCloudVmClusters") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListCloudVmClustersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListCloudVmClustersResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListCloudVmClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListCloudVmClustersResponse: r"""Call the list cloud vm clusters method over HTTP. Args: @@ -2118,36 +2509,41 @@ def __call__(self, The response for ``CloudVmCluster.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/cloudVmClusters', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudVmClusters", + }, ] - request, metadata = self._interceptor.pre_list_cloud_vm_clusters(request, metadata) + request, metadata = self._interceptor.pre_list_cloud_vm_clusters( + request, metadata + ) pb_request = oracledatabase.ListCloudVmClustersRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2166,19 +2562,24 @@ class _ListDbNodes(OracleDatabaseRestStub): def __hash__(self): return hash("ListDbNodes") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListDbNodesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListDbNodesResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbNodesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbNodesResponse: r"""Call the list db nodes method over HTTP. Args: @@ -2195,36 +2596,39 @@ def __call__(self, The response for ``DbNode.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes", + }, ] request, metadata = self._interceptor.pre_list_db_nodes(request, metadata) pb_request = oracledatabase.ListDbNodesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2243,19 +2647,24 @@ class _ListDbServers(OracleDatabaseRestStub): def __hash__(self): return hash("ListDbServers") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListDbServersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListDbServersResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbServersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbServersResponse: r"""Call the list db servers method over HTTP. Args: @@ -2272,36 +2681,39 @@ def __call__(self, The response for ``DbServer.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers", + }, ] request, metadata = self._interceptor.pre_list_db_servers(request, metadata) pb_request = oracledatabase.ListDbServersRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2320,19 +2732,24 @@ class _ListDbSystemShapes(OracleDatabaseRestStub): def __hash__(self): return hash("ListDbSystemShapes") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListDbSystemShapesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListDbSystemShapesResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbSystemShapesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbSystemShapesResponse: r"""Call the list db system shapes method over HTTP. Args: @@ -2349,36 +2766,41 @@ def __call__(self, The response for ``DbSystemShape.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/dbSystemShapes', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dbSystemShapes", + }, ] - request, metadata = self._interceptor.pre_list_db_system_shapes(request, metadata) + request, metadata = self._interceptor.pre_list_db_system_shapes( + request, metadata + ) pb_request = oracledatabase.ListDbSystemShapesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2397,19 +2819,24 @@ class _ListEntitlements(OracleDatabaseRestStub): def __hash__(self): return hash("ListEntitlements") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListEntitlementsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListEntitlementsResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListEntitlementsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListEntitlementsResponse: r"""Call the list entitlements method over HTTP. Args: @@ -2426,36 +2853,41 @@ def __call__(self, The response for ``Entitlement.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/entitlements', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/entitlements", + }, ] - request, metadata = self._interceptor.pre_list_entitlements(request, metadata) + request, metadata = self._interceptor.pre_list_entitlements( + request, metadata + ) pb_request = oracledatabase.ListEntitlementsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2474,19 +2906,24 @@ class _ListGiVersions(OracleDatabaseRestStub): def __hash__(self): return hash("ListGiVersions") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListGiVersionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListGiVersionsResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListGiVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListGiVersionsResponse: r"""Call the list gi versions method over HTTP. Args: @@ -2503,36 +2940,41 @@ def __call__(self, The response for ``GiVersion.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/giVersions', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/giVersions", + }, ] - request, metadata = self._interceptor.pre_list_gi_versions(request, metadata) + request, metadata = self._interceptor.pre_list_gi_versions( + request, metadata + ) pb_request = oracledatabase.ListGiVersionsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2551,77 +2993,86 @@ class _RestoreAutonomousDatabase(OracleDatabaseRestStub): def __hash__(self): return hash("RestoreAutonomousDatabase") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.RestoreAutonomousDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the restore autonomous - database method over HTTP. - - Args: - request (~.oracledatabase.RestoreAutonomousDatabaseRequest): - The request object. The request for ``AutonomousDatabase.Restore``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + def __call__( + self, + request: oracledatabase.RestoreAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.RestoreAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Restore``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_restore_autonomous_database(request, metadata) + request, metadata = self._interceptor.pre_restore_autonomous_database( + request, metadata + ) pb_request = oracledatabase.RestoreAutonomousDatabaseRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In 
case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2635,193 +3086,249 @@ def __call__(self, return resp @property - def create_autonomous_database(self) -> Callable[ - [oracledatabase.CreateAutonomousDatabaseRequest], - operations_pb2.Operation]: + def create_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.CreateAutonomousDatabaseRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + return self._CreateAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore @property - def create_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.CreateCloudExadataInfrastructureRequest], - operations_pb2.Operation]: + def create_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.CreateCloudExadataInfrastructureRequest], + operations_pb2.Operation, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + return self._CreateCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore @property - def create_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.CreateCloudVmClusterRequest], - operations_pb2.Operation]: + def create_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.CreateCloudVmClusterRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + return self._CreateCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore @property - def delete_autonomous_database(self) -> Callable[ - [oracledatabase.DeleteAutonomousDatabaseRequest], - operations_pb2.Operation]: + def delete_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.DeleteAutonomousDatabaseRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore @property - def delete_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.DeleteCloudExadataInfrastructureRequest], - operations_pb2.Operation]: + def delete_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudExadataInfrastructureRequest], + operations_pb2.Operation, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore @property - def delete_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.DeleteCloudVmClusterRequest], - operations_pb2.Operation]: + def delete_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudVmClusterRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._DeleteCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore @property - def generate_autonomous_database_wallet(self) -> Callable[ - [oracledatabase.GenerateAutonomousDatabaseWalletRequest], - oracledatabase.GenerateAutonomousDatabaseWalletResponse]: + def generate_autonomous_database_wallet( + self, + ) -> Callable[ + [oracledatabase.GenerateAutonomousDatabaseWalletRequest], + oracledatabase.GenerateAutonomousDatabaseWalletResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GenerateAutonomousDatabaseWallet(self._session, self._host, self._interceptor) # type: ignore + return self._GenerateAutonomousDatabaseWallet(self._session, self._host, self._interceptor) # type: ignore @property - def get_autonomous_database(self) -> Callable[ - [oracledatabase.GetAutonomousDatabaseRequest], - autonomous_database.AutonomousDatabase]: + def get_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.GetAutonomousDatabaseRequest], + autonomous_database.AutonomousDatabase, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + return self._GetAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore @property - def get_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.GetCloudExadataInfrastructureRequest], - exadata_infra.CloudExadataInfrastructure]: + def get_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.GetCloudExadataInfrastructureRequest], + exadata_infra.CloudExadataInfrastructure, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + return self._GetCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore @property - def get_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.GetCloudVmClusterRequest], - vm_cluster.CloudVmCluster]: + def get_cloud_vm_cluster( + self, + ) -> Callable[[oracledatabase.GetCloudVmClusterRequest], vm_cluster.CloudVmCluster]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + return self._GetCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore @property - def list_autonomous_database_backups(self) -> Callable[ - [oracledatabase.ListAutonomousDatabaseBackupsRequest], - oracledatabase.ListAutonomousDatabaseBackupsResponse]: + def list_autonomous_database_backups( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseBackupsRequest], + oracledatabase.ListAutonomousDatabaseBackupsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListAutonomousDatabaseBackups(self._session, self._host, self._interceptor) # type: ignore + return self._ListAutonomousDatabaseBackups(self._session, self._host, self._interceptor) # type: ignore @property - def list_autonomous_database_character_sets(self) -> Callable[ - [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], - oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: + def list_autonomous_database_character_sets( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAutonomousDatabaseCharacterSets(self._session, self._host, self._interceptor) # type: ignore + return self._ListAutonomousDatabaseCharacterSets(self._session, self._host, self._interceptor) # type: ignore @property - def list_autonomous_databases(self) -> Callable[ - [oracledatabase.ListAutonomousDatabasesRequest], - oracledatabase.ListAutonomousDatabasesResponse]: + def list_autonomous_databases( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabasesRequest], + oracledatabase.ListAutonomousDatabasesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListAutonomousDatabases(self._session, self._host, self._interceptor) # type: ignore + return self._ListAutonomousDatabases(self._session, self._host, self._interceptor) # type: ignore @property - def list_autonomous_db_versions(self) -> Callable[ - [oracledatabase.ListAutonomousDbVersionsRequest], - oracledatabase.ListAutonomousDbVersionsResponse]: + def list_autonomous_db_versions( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDbVersionsRequest], + oracledatabase.ListAutonomousDbVersionsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAutonomousDbVersions(self._session, self._host, self._interceptor) # type: ignore + return self._ListAutonomousDbVersions(self._session, self._host, self._interceptor) # type: ignore @property - def list_cloud_exadata_infrastructures(self) -> Callable[ - [oracledatabase.ListCloudExadataInfrastructuresRequest], - oracledatabase.ListCloudExadataInfrastructuresResponse]: + def list_cloud_exadata_infrastructures( + self, + ) -> Callable[ + [oracledatabase.ListCloudExadataInfrastructuresRequest], + oracledatabase.ListCloudExadataInfrastructuresResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListCloudExadataInfrastructures(self._session, self._host, self._interceptor) # type: ignore + return self._ListCloudExadataInfrastructures(self._session, self._host, self._interceptor) # type: ignore @property - def list_cloud_vm_clusters(self) -> Callable[ - [oracledatabase.ListCloudVmClustersRequest], - oracledatabase.ListCloudVmClustersResponse]: + def list_cloud_vm_clusters( + self, + ) -> Callable[ + [oracledatabase.ListCloudVmClustersRequest], + oracledatabase.ListCloudVmClustersResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListCloudVmClusters(self._session, self._host, self._interceptor) # type: ignore + return self._ListCloudVmClusters(self._session, self._host, self._interceptor) # type: ignore @property - def list_db_nodes(self) -> Callable[ - [oracledatabase.ListDbNodesRequest], - oracledatabase.ListDbNodesResponse]: + def list_db_nodes( + self, + ) -> Callable[ + [oracledatabase.ListDbNodesRequest], oracledatabase.ListDbNodesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListDbNodes(self._session, self._host, self._interceptor) # type: ignore + return self._ListDbNodes(self._session, self._host, self._interceptor) # type: ignore @property - def list_db_servers(self) -> Callable[ - [oracledatabase.ListDbServersRequest], - oracledatabase.ListDbServersResponse]: + def list_db_servers( + self, + ) -> Callable[ + [oracledatabase.ListDbServersRequest], oracledatabase.ListDbServersResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListDbServers(self._session, self._host, self._interceptor) # type: ignore + return self._ListDbServers(self._session, self._host, self._interceptor) # type: ignore @property - def list_db_system_shapes(self) -> Callable[ - [oracledatabase.ListDbSystemShapesRequest], - oracledatabase.ListDbSystemShapesResponse]: + def list_db_system_shapes( + self, + ) -> Callable[ + [oracledatabase.ListDbSystemShapesRequest], + oracledatabase.ListDbSystemShapesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListDbSystemShapes(self._session, self._host, self._interceptor) # type: ignore + return self._ListDbSystemShapes(self._session, self._host, self._interceptor) # type: ignore @property - def list_entitlements(self) -> Callable[ - [oracledatabase.ListEntitlementsRequest], - oracledatabase.ListEntitlementsResponse]: + def list_entitlements( + self, + ) -> Callable[ + [oracledatabase.ListEntitlementsRequest], + oracledatabase.ListEntitlementsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListEntitlements(self._session, self._host, self._interceptor) # type: ignore + return self._ListEntitlements(self._session, self._host, self._interceptor) # type: ignore @property - def list_gi_versions(self) -> Callable[ - [oracledatabase.ListGiVersionsRequest], - oracledatabase.ListGiVersionsResponse]: + def list_gi_versions( + self, + ) -> Callable[ + [oracledatabase.ListGiVersionsRequest], oracledatabase.ListGiVersionsResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListGiVersions(self._session, self._host, self._interceptor) # type: ignore + return self._ListGiVersions(self._session, self._host, self._interceptor) # type: ignore @property - def restore_autonomous_database(self) -> Callable[ - [oracledatabase.RestoreAutonomousDatabaseRequest], - operations_pb2.Operation]: + def restore_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.RestoreAutonomousDatabaseRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._RestoreAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + return self._RestoreAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(OracleDatabaseRestStub): - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -2837,26 +3344,26 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] request, metadata = self._interceptor.pre_get_location(request, metadata) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -2877,16 +3384,17 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(OracleDatabaseRestStub): - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. 
Args: @@ -2902,26 +3410,26 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] request, metadata = self._interceptor.pre_list_locations(request, metadata) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -2942,16 +3450,17 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(OracleDatabaseRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. 
Args: @@ -2964,28 +3473,30 @@ def __call__(self, sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -3004,16 +3515,17 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(OracleDatabaseRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: 
operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -3026,26 +3538,28 @@ def __call__(self, sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] - request, metadata = self._interceptor.pre_delete_operation(request, metadata) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -3063,16 +3577,17 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(OracleDatabaseRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -3088,26 +3603,26 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -3128,16 +3643,17 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(OracleDatabaseRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - 
timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -3153,26 +3669,26 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] request, metadata = self._interceptor.pre_list_operations(request, metadata) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -3199,6 +3715,4 @@ def close(self): self._session.close() -__all__=( - 'OracleDatabaseRestTransport', -) +__all__ = ("OracleDatabaseRestTransport",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py new file mode 
100644 index 000000000000..e5079e7c48c9 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from .autonomous_database_character_set import AutonomousDatabaseCharacterSet +from .autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from .autonomous_db_version import AutonomousDbVersion +from .common import CustomerContact +from .db_node import DbNode, DbNodeProperties +from .db_server import DbServer, DbServerProperties +from .db_system_shape import DbSystemShape +from .entitlement import CloudAccountDetails, Entitlement +from .exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from .gi_version import GiVersion +from .location_metadata import LocationMetadata +from .oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + 
DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from .vm_cluster import CloudVmCluster, CloudVmClusterProperties, DataCollectionOptions + +__all__ = ( + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "DatabaseConnectionStringProfile", + "ScheduledOperationDetails", + "DBWorkload", + "GenerateType", + "OperationsInsightsState", + "State", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDbVersion", + "CustomerContact", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "CloudAccountDetails", + "Entitlement", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", + "GiVersion", + "LocationMetadata", + 
"CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "OperationMetadata", + "RestoreAutonomousDatabaseRequest", + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py index f1e5c317c8ac..907ef93bc4d5 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py +++ 
b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py @@ -17,31 +17,30 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.oracledatabase_v1.types import common from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore +from google.cloud.oracledatabase_v1.types import common __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'GenerateType', - 'State', - 'OperationsInsightsState', - 'DBWorkload', - 'AutonomousDatabase', - 'AutonomousDatabaseProperties', - 'AutonomousDatabaseApex', - 'AutonomousDatabaseConnectionStrings', - 'DatabaseConnectionStringProfile', - 'AllConnectionStrings', - 'AutonomousDatabaseConnectionUrls', - 'AutonomousDatabaseStandbySummary', - 'ScheduledOperationDetails', + "GenerateType", + "State", + "OperationsInsightsState", + "DBWorkload", + "AutonomousDatabase", + "AutonomousDatabaseProperties", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "DatabaseConnectionStringProfile", + "AllConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseStandbySummary", + "ScheduledOperationDetails", }, ) @@ -277,10 +276,10 @@ class AutonomousDatabase(proto.Message): proto.STRING, number=6, ) - properties: 'AutonomousDatabaseProperties' = proto.Field( + properties: "AutonomousDatabaseProperties" = proto.Field( proto.MESSAGE, number=7, - message='AutonomousDatabaseProperties', + message="AutonomousDatabaseProperties", ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -506,6 +505,7 @@ class AutonomousDatabaseProperties(proto.Message): Output only. The date and time when maintenance will end. 
""" + class DatabaseEdition(proto.Enum): r"""The editions available for the Autonomous Database. @@ -733,10 +733,10 @@ class Role(proto.Enum): proto.INT32, number=63, ) - db_workload: 'DBWorkload' = proto.Field( + db_workload: "DBWorkload" = proto.Field( proto.ENUM, number=5, - enum='DBWorkload', + enum="DBWorkload", ) db_edition: DatabaseEdition = proto.Field( proto.ENUM, @@ -810,10 +810,10 @@ class Role(proto.Enum): proto.DOUBLE, number=22, ) - apex_details: 'AutonomousDatabaseApex' = proto.Field( + apex_details: "AutonomousDatabaseApex" = proto.Field( proto.MESSAGE, number=23, - message='AutonomousDatabaseApex', + message="AutonomousDatabaseApex", ) are_primary_allowlisted_ips_used: bool = proto.Field( proto.BOOL, @@ -824,10 +824,10 @@ class Role(proto.Enum): proto.STRING, number=25, ) - state: 'State' = proto.Field( + state: "State" = proto.Field( proto.ENUM, number=26, - enum='State', + enum="State", ) autonomous_container_database_id: str = proto.Field( proto.STRING, @@ -837,15 +837,15 @@ class Role(proto.Enum): proto.STRING, number=28, ) - connection_strings: 'AutonomousDatabaseConnectionStrings' = proto.Field( + connection_strings: "AutonomousDatabaseConnectionStrings" = proto.Field( proto.MESSAGE, number=29, - message='AutonomousDatabaseConnectionStrings', + message="AutonomousDatabaseConnectionStrings", ) - connection_urls: 'AutonomousDatabaseConnectionUrls' = proto.Field( + connection_urls: "AutonomousDatabaseConnectionUrls" = proto.Field( proto.MESSAGE, number=30, - message='AutonomousDatabaseConnectionUrls', + message="AutonomousDatabaseConnectionUrls", ) failed_data_recovery_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, @@ -864,10 +864,10 @@ class Role(proto.Enum): proto.INT32, number=35, ) - local_standby_db: 'AutonomousDatabaseStandbySummary' = proto.Field( + local_standby_db: "AutonomousDatabaseStandbySummary" = proto.Field( proto.MESSAGE, number=36, - message='AutonomousDatabaseStandbySummary', + 
message="AutonomousDatabaseStandbySummary", ) memory_per_oracle_compute_unit_gbs: int = proto.Field( proto.INT32, @@ -893,10 +893,10 @@ class Role(proto.Enum): number=41, enum=OpenMode, ) - operations_insights_state: 'OperationsInsightsState' = proto.Field( + operations_insights_state: "OperationsInsightsState" = proto.Field( proto.ENUM, number=42, - enum='OperationsInsightsState', + enum="OperationsInsightsState", ) peer_db_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -926,10 +926,12 @@ class Role(proto.Enum): number=48, enum=Role, ) - scheduled_operation_details: MutableSequence['ScheduledOperationDetails'] = proto.RepeatedField( + scheduled_operation_details: MutableSequence[ + "ScheduledOperationDetails" + ] = proto.RepeatedField( proto.MESSAGE, number=64, - message='ScheduledOperationDetails', + message="ScheduledOperationDetails", ) sql_web_developer_url: str = proto.Field( proto.STRING, @@ -1023,10 +1025,10 @@ class AutonomousDatabaseConnectionStrings(proto.Message): select values based on the structured metadata. """ - all_connection_strings: 'AllConnectionStrings' = proto.Field( + all_connection_strings: "AllConnectionStrings" = proto.Field( proto.MESSAGE, number=1, - message='AllConnectionStrings', + message="AllConnectionStrings", ) dedicated: str = proto.Field( proto.STRING, @@ -1044,10 +1046,10 @@ class AutonomousDatabaseConnectionStrings(proto.Message): proto.STRING, number=5, ) - profiles: MutableSequence['DatabaseConnectionStringProfile'] = proto.RepeatedField( + profiles: MutableSequence["DatabaseConnectionStringProfile"] = proto.RepeatedField( proto.MESSAGE, number=6, - message='DatabaseConnectionStringProfile', + message="DatabaseConnectionStringProfile", ) @@ -1085,6 +1087,7 @@ class DatabaseConnectionStringProfile(proto.Message): Output only. The value of the connection string. """ + class ConsumerGroup(proto.Enum): r"""The various consumer groups available in the connection string profile. 
@@ -1368,10 +1371,10 @@ class AutonomousDatabaseStandbySummary(proto.Message): proto.STRING, number=2, ) - state: 'State' = proto.Field( + state: "State" = proto.Field( proto.ENUM, number=3, - enum='State', + enum="State", ) data_guard_role_changed_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py index dacd4b6dc95b..dd6bfd509fce 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'AutonomousDatabaseCharacterSet', + "AutonomousDatabaseCharacterSet", }, ) @@ -45,6 +44,7 @@ class AutonomousDatabaseCharacterSet(proto.Message): Autonomous Database which is the ID in the resource name. """ + class CharacterSetType(proto.Enum): r"""The type of character set an Autonomous Database can have. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py similarity index 97% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py index c7f900cfb859..1f15eb50e02a 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py @@ -17,16 +17,14 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'AutonomousDatabaseBackup', - 'AutonomousDatabaseBackupProperties', + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", }, ) @@ -66,10 +64,10 @@ class AutonomousDatabaseBackup(proto.Message): proto.STRING, number=3, ) - properties: 'AutonomousDatabaseBackupProperties' = proto.Field( + properties: "AutonomousDatabaseBackupProperties" = proto.Field( proto.MESSAGE, number=4, - message='AutonomousDatabaseBackupProperties', + message="AutonomousDatabaseBackupProperties", ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -148,6 +146,7 @@ class AutonomousDatabaseBackupProperties(proto.Message): vault_id (str): Optional. The OCID of the vault. """ + class State(proto.Enum): r"""// The various lifecycle states of the Autonomous Database Backup. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py index f6773072c559..05189694df98 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py @@ -21,11 +21,10 @@ from google.cloud.oracledatabase_v1.types import autonomous_database - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'AutonomousDbVersion', + "AutonomousDbVersion", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py similarity index 94% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py index 54ef447a9f93..2357b454221c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'CustomerContact', + "CustomerContact", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py 
b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py index 499c37d17305..4f0a7175908f 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py @@ -19,12 +19,11 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'DbNode', - 'DbNodeProperties', + "DbNode", + "DbNodeProperties", }, ) @@ -47,10 +46,10 @@ class DbNode(proto.Message): proto.STRING, number=1, ) - properties: 'DbNodeProperties' = proto.Field( + properties: "DbNodeProperties" = proto.Field( proto.MESSAGE, number=3, - message='DbNodeProperties', + message="DbNodeProperties", ) @@ -75,6 +74,7 @@ class DbNodeProperties(proto.Message): total_cpu_core_count (int): Total CPU core count of the database node. """ + class State(proto.Enum): r"""The various lifecycle states of the database node. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py index f7b0a1cfc988..ac60975560d4 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py @@ -19,12 +19,11 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'DbServer', - 'DbServerProperties', + "DbServer", + "DbServerProperties", }, ) @@ -54,10 +53,10 @@ class DbServer(proto.Message): proto.STRING, number=2, ) - properties: 'DbServerProperties' = proto.Field( + properties: "DbServerProperties" = proto.Field( proto.MESSAGE, number=3, - message='DbServerProperties', + message="DbServerProperties", ) @@ -88,6 +87,7 @@ class DbServerProperties(proto.Message): Output only. OCID of database nodes associated with the database server. """ + class State(proto.Enum): r"""The various lifecycle states of the database server. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py similarity index 97% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py index 98637445e52b..7429af46b6cc 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'DbSystemShape', + "DbSystemShape", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py similarity index 94% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py index 6314fc926c5e..01b82a412c0b 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py @@ -19,12 +19,11 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'Entitlement', - 'CloudAccountDetails', + "Entitlement", + "CloudAccountDetails", }, ) @@ -45,6 +44,7 @@ class Entitlement(proto.Message): state (google.cloud.oracledatabase_v1.types.Entitlement.State): Output only. 
Entitlement State. """ + class State(proto.Enum): r"""The various lifecycle states of the subscription. @@ -67,10 +67,10 @@ class State(proto.Enum): proto.STRING, number=1, ) - cloud_account_details: 'CloudAccountDetails' = proto.Field( + cloud_account_details: "CloudAccountDetails" = proto.Field( proto.MESSAGE, number=2, - message='CloudAccountDetails', + message="CloudAccountDetails", ) entitlement_id: str = proto.Field( proto.STRING, diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py similarity index 97% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py index 7e4669189feb..c57ed47f33ef 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py @@ -17,20 +17,19 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.oracledatabase_v1.types import common from google.protobuf import timestamp_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore from google.type import month_pb2 # type: ignore +import proto # type: ignore +from google.cloud.oracledatabase_v1.types import common __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'CloudExadataInfrastructure', - 'CloudExadataInfrastructureProperties', - 'MaintenanceWindow', + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", }, ) @@ -80,10 +79,10 @@ class CloudExadataInfrastructure(proto.Message): proto.STRING, number=4, ) - properties: 
'CloudExadataInfrastructureProperties' = proto.Field( + properties: "CloudExadataInfrastructureProperties" = proto.Field( proto.MESSAGE, number=5, - message='CloudExadataInfrastructureProperties', + message="CloudExadataInfrastructureProperties", ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -189,6 +188,7 @@ class CloudExadataInfrastructureProperties(proto.Message): the database servers (dom0) in the Exadata Infrastructure. Example: 20.1.15 """ + class State(proto.Enum): r"""The various lifecycle states of the Exadata Infrastructure. @@ -243,10 +243,10 @@ class State(proto.Enum): proto.INT32, number=5, ) - maintenance_window: 'MaintenanceWindow' = proto.Field( + maintenance_window: "MaintenanceWindow" = proto.Field( proto.MESSAGE, number=6, - message='MaintenanceWindow', + message="MaintenanceWindow", ) state: State = proto.Field( proto.ENUM, @@ -390,6 +390,7 @@ class MaintenanceWindow(proto.Message): of a custom action timeout (waiting period) between database server patching operations. """ + class MaintenanceWindowPreference(proto.Enum): r"""Maintenance window preference. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py similarity index 95% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py index bc14de4f4a9c..1ecf83198d06 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'GiVersion', + "GiVersion", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py similarity index 94% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py index f6d3d2d71fc0..f81798592e71 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'LocationMetadata', + "LocationMetadata", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py 
b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py similarity index 92% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py index 038dbf0e4c44..796dbe2203f9 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py @@ -17,60 +17,63 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.oracledatabase_v1.types import autonomous_database as gco_autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database_character_set -from google.cloud.oracledatabase_v1.types import autonomous_db_backup -from google.cloud.oracledatabase_v1.types import autonomous_db_version -from google.cloud.oracledatabase_v1.types import db_node -from google.cloud.oracledatabase_v1.types import db_server -from google.cloud.oracledatabase_v1.types import db_system_shape -from google.cloud.oracledatabase_v1.types import entitlement -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import gi_version -from google.cloud.oracledatabase_v1.types import vm_cluster from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 
'ListCloudExadataInfrastructuresRequest', - 'ListCloudExadataInfrastructuresResponse', - 'GetCloudExadataInfrastructureRequest', - 'CreateCloudExadataInfrastructureRequest', - 'DeleteCloudExadataInfrastructureRequest', - 'ListCloudVmClustersRequest', - 'ListCloudVmClustersResponse', - 'GetCloudVmClusterRequest', - 'CreateCloudVmClusterRequest', - 'DeleteCloudVmClusterRequest', - 'ListEntitlementsRequest', - 'ListEntitlementsResponse', - 'ListDbServersRequest', - 'ListDbServersResponse', - 'ListDbNodesRequest', - 'ListDbNodesResponse', - 'ListGiVersionsRequest', - 'ListGiVersionsResponse', - 'ListDbSystemShapesRequest', - 'ListDbSystemShapesResponse', - 'OperationMetadata', - 'ListAutonomousDatabasesRequest', - 'ListAutonomousDatabasesResponse', - 'GetAutonomousDatabaseRequest', - 'CreateAutonomousDatabaseRequest', - 'DeleteAutonomousDatabaseRequest', - 'RestoreAutonomousDatabaseRequest', - 'GenerateAutonomousDatabaseWalletRequest', - 'GenerateAutonomousDatabaseWalletResponse', - 'ListAutonomousDbVersionsRequest', - 'ListAutonomousDbVersionsResponse', - 'ListAutonomousDatabaseCharacterSetsRequest', - 'ListAutonomousDatabaseCharacterSetsResponse', - 'ListAutonomousDatabaseBackupsRequest', - 'ListAutonomousDatabaseBackupsResponse', + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "GetCloudExadataInfrastructureRequest", + "CreateCloudExadataInfrastructureRequest", + "DeleteCloudExadataInfrastructureRequest", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "GetCloudVmClusterRequest", + "CreateCloudVmClusterRequest", + "DeleteCloudVmClusterRequest", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "OperationMetadata", + "ListAutonomousDatabasesRequest", + 
"ListAutonomousDatabasesResponse", + "GetAutonomousDatabaseRequest", + "CreateAutonomousDatabaseRequest", + "DeleteAutonomousDatabaseRequest", + "RestoreAutonomousDatabaseRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", }, ) @@ -122,7 +125,9 @@ class ListCloudExadataInfrastructuresResponse(proto.Message): def raw_page(self): return self - cloud_exadata_infrastructures: MutableSequence[exadata_infra.CloudExadataInfrastructure] = proto.RepeatedField( + cloud_exadata_infrastructures: MutableSequence[ + exadata_infra.CloudExadataInfrastructure + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=exadata_infra.CloudExadataInfrastructure, @@ -189,10 +194,12 @@ class CreateCloudExadataInfrastructureRequest(proto.Message): proto.STRING, number=2, ) - cloud_exadata_infrastructure: exadata_infra.CloudExadataInfrastructure = proto.Field( - proto.MESSAGE, - number=3, - message=exadata_infra.CloudExadataInfrastructure, + cloud_exadata_infrastructure: exadata_infra.CloudExadataInfrastructure = ( + proto.Field( + proto.MESSAGE, + number=3, + message=exadata_infra.CloudExadataInfrastructure, + ) ) request_id: str = proto.Field( proto.STRING, @@ -694,7 +701,9 @@ class ListDbSystemShapesResponse(proto.Message): def raw_page(self): return self - db_system_shapes: MutableSequence[db_system_shape.DbSystemShape] = proto.RepeatedField( + db_system_shapes: MutableSequence[ + db_system_shape.DbSystemShape + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=db_system_shape.DbSystemShape, @@ -835,7 +844,9 @@ class ListAutonomousDatabasesResponse(proto.Message): def raw_page(self): return self - autonomous_databases: 
MutableSequence[gco_autonomous_database.AutonomousDatabase] = proto.RepeatedField( + autonomous_databases: MutableSequence[ + gco_autonomous_database.AutonomousDatabase + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=gco_autonomous_database.AutonomousDatabase, @@ -1071,7 +1082,9 @@ class ListAutonomousDbVersionsResponse(proto.Message): def raw_page(self): return self - autonomous_db_versions: MutableSequence[autonomous_db_version.AutonomousDbVersion] = proto.RepeatedField( + autonomous_db_versions: MutableSequence[ + autonomous_db_version.AutonomousDbVersion + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=autonomous_db_version.AutonomousDbVersion, @@ -1141,7 +1154,9 @@ class ListAutonomousDatabaseCharacterSetsResponse(proto.Message): def raw_page(self): return self - autonomous_database_character_sets: MutableSequence[autonomous_database_character_set.AutonomousDatabaseCharacterSet] = proto.RepeatedField( + autonomous_database_character_sets: MutableSequence[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=autonomous_database_character_set.AutonomousDatabaseCharacterSet, @@ -1213,7 +1228,9 @@ class ListAutonomousDatabaseBackupsResponse(proto.Message): def raw_page(self): return self - autonomous_database_backups: MutableSequence[autonomous_db_backup.AutonomousDatabaseBackup] = proto.RepeatedField( + autonomous_database_backups: MutableSequence[ + autonomous_db_backup.AutonomousDatabaseBackup + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=autonomous_db_backup.AutonomousDatabaseBackup, diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py similarity index 97% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py rename to 
packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py index 3fe0ae3b740b..44104d291bd3 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py @@ -17,18 +17,16 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import timestamp_pb2 # type: ignore from google.type import datetime_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'CloudVmCluster', - 'CloudVmClusterProperties', - 'DataCollectionOptions', + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", }, ) @@ -91,10 +89,10 @@ class CloudVmCluster(proto.Message): proto.STRING, number=12, ) - properties: 'CloudVmClusterProperties' = proto.Field( + properties: "CloudVmClusterProperties" = proto.Field( proto.MESSAGE, number=6, - message='CloudVmClusterProperties', + message="CloudVmClusterProperties", ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -204,6 +202,7 @@ class CloudVmClusterProperties(proto.Message): cluster_name (str): Optional. OCI Cluster name. """ + class LicenseType(proto.Enum): r"""Different licenses supported. 
@@ -338,10 +337,10 @@ class State(proto.Enum): proto.STRING, number=16, ) - diagnostics_data_collection_options: 'DataCollectionOptions' = proto.Field( + diagnostics_data_collection_options: "DataCollectionOptions" = proto.Field( proto.MESSAGE, number=19, - message='DataCollectionOptions', + message="DataCollectionOptions", ) state: State = proto.Field( proto.ENUM, diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/mypy.ini b/packages/google-cloud-oracledatabase/mypy.ini similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/mypy.ini rename to packages/google-cloud-oracledatabase/mypy.ini diff --git a/packages/google-cloud-oracledatabase/noxfile.py b/packages/google-cloud-oracledatabase/noxfile.py new file mode 100644 index 000000000000..67b7265f7586 --- /dev/null +++ b/packages/google-cloud-oracledatabase/noxfile.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. 
+ + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py diff 
--git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py rename to 
packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py rename to 
packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json 
b/packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json rename to packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json diff --git a/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/scripts/fixup_oracledatabase_v1_keywords.py b/packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/scripts/fixup_oracledatabase_v1_keywords.py rename to packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/setup.py b/packages/google-cloud-oracledatabase/setup.py similarity index 93% rename from owl-bot-staging/google-cloud-oracledatabase/v1/setup.py rename to packages/google-cloud-oracledatabase/setup.py index ac8389232396..f4dfafa62eff 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/setup.py +++ b/packages/google-cloud-oracledatabase/setup.py @@ -17,20 +17,22 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-oracledatabase' +name = "google-cloud-oracledatabase" description = "Google Cloud Oracledatabase API client library" version = None -with open(os.path.join(package_root, 'google/cloud/oracledatabase/gapic_version.py')) as fp: +with open( + os.path.join(package_root, "google/cloud/oracledatabase/gapic_version.py") +) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 
version = version_candidates[0] if version[0] == "0": diff --git a/packages/google-cloud-oracledatabase/testing/.gitignore b/packages/google-cloud-oracledatabase/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.10.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.10.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.10.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.10.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.11.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.11.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.11.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.11.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.12.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.12.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.12.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.12.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.7.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.7.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.7.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.7.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.8.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.8.txt similarity index 100% rename from 
owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.8.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.8.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.9.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.9.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.9.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.9.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py b/packages/google-cloud-oracledatabase/tests/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py rename to packages/google-cloud-oracledatabase/tests/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py +++ b/packages/google-cloud-oracledatabase/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py rename to packages/google-cloud-oracledatabase/tests/unit/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py +++ b/packages/google-cloud-oracledatabase/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py rename to packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py index 
7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py rename to packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py similarity index 63% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py rename to packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py index 7ad6633e7c25..05cfe6d9f132 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,78 +22,94 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio from collections.abc import Iterable -from google.protobuf import json_format import json import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format -from google.api_core import client_options +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.cloud.oracledatabase_v1.services.oracle_database import OracleDatabaseClient -from google.cloud.oracledatabase_v1.services.oracle_database import pagers -from google.cloud.oracledatabase_v1.services.oracle_database import transports -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database as gco_autonomous_database -from google.cloud.oracledatabase_v1.types import 
autonomous_database_character_set -from google.cloud.oracledatabase_v1.types import autonomous_db_backup -from google.cloud.oracledatabase_v1.types import autonomous_db_version -from google.cloud.oracledatabase_v1.types import common -from google.cloud.oracledatabase_v1.types import db_node -from google.cloud.oracledatabase_v1.types import db_server -from google.cloud.oracledatabase_v1.types import db_system_shape -from google.cloud.oracledatabase_v1.types import entitlement -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import gi_version -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.type import datetime_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore from google.type import month_pb2 # type: ignore from google.type import timeofday_pb2 # type: ignore -import google.auth +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.oracledatabase_v1.services.oracle_database import ( + OracleDatabaseClient, + pagers, + transports, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + common, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) 
+from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) +from google.cloud.oracledatabase_v1.types import autonomous_database def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -103,98 +120,219 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert OracleDatabaseClient._get_default_mtls_endpoint(None) is None - assert OracleDatabaseClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert OracleDatabaseClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert OracleDatabaseClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert OracleDatabaseClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert OracleDatabaseClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + assert ( + 
OracleDatabaseClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + def test__read_environment_variables(): assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert OracleDatabaseClient._read_environment_variables() == (True, "auto", None) + assert OracleDatabaseClient._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + None, + ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: OracleDatabaseClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "never", None) + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "never", + None, + ) with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "always", None) + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: OracleDatabaseClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "auto", "foo.com") + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert OracleDatabaseClient._get_client_cert_source(None, False) is None - assert OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + OracleDatabaseClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + OracleDatabaseClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert OracleDatabaseClient._get_client_cert_source(None, True) is mock_default_cert_source - assert OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE - default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert OracleDatabaseClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert OracleDatabaseClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT - assert OracleDatabaseClient._get_api_endpoint(None, None, 
default_universe, "auto") == default_endpoint - assert OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "always") == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT - assert OracleDatabaseClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT - assert OracleDatabaseClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + OracleDatabaseClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + OracleDatabaseClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "always") + == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OracleDatabaseClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - OracleDatabaseClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + OracleDatabaseClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert OracleDatabaseClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert OracleDatabaseClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert OracleDatabaseClient._get_universe_domain(None, None) == OracleDatabaseClient._DEFAULT_UNIVERSE + assert ( + OracleDatabaseClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + OracleDatabaseClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + OracleDatabaseClient._get_universe_domain(None, None) + == OracleDatabaseClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: OracleDatabaseClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), -]) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) ) assert client._validate_universe_domain() == True @@ -204,15 +342,19 @@ def test__validate_universe_domain(client_class, transport_class, transport_name if transport_name == "grpc": # Test the case where credentials are provided by the # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) client = client_class(transport=transport_class(channel=channel)) assert client._validate_universe_domain() == True # Test the case where credentials do not exist: e.g. a transport is provided # with no credentials. Validation should still succeed because there is no # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) transport._credentials = None client = client_class(transport=transport) assert client._validate_universe_domain() == True @@ -220,40 +362,58 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=credentials) - ) + client = client_class(transport=transport_class(credentials=credentials)) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) # Test the case when there is a universe mismatch from the client. # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) # Test that ValueError is raised if universe_domain is provided via client options and credentials is None with pytest.raises(ValueError): client._compare_universes("foo.bar", None) -@pytest.mark.parametrize("client_class,transport_name", [ - (OracleDatabaseClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OracleDatabaseClient, "rest"), + ], +) def test_oracle_database_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -261,48 +421,64 @@ def test_oracle_database_client_from_service_account_info(client_class, transpor assert isinstance(client, client_class) assert client.transport._host == ( - 'oracledatabase.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://oracledatabase.googleapis.com' + "oracledatabase.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.OracleDatabaseRestTransport, "rest"), -]) -def test_oracle_database_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.OracleDatabaseRestTransport, "rest"), + ], +) +def test_oracle_database_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + 
service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (OracleDatabaseClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OracleDatabaseClient, "rest"), + ], +) def test_oracle_database_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'oracledatabase.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://oracledatabase.googleapis.com' + "oracledatabase.googleapis.com:443" + if 
transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com" ) @@ -317,27 +493,34 @@ def test_oracle_database_client_get_transport_class(): assert transport == transports.OracleDatabaseRestTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), -]) -@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) -def test_oracle_database_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) +def test_oracle_database_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(OracleDatabaseClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(OracleDatabaseClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(OracleDatabaseClient, 'get_transport_class') as gtc: + with mock.patch.object(OracleDatabaseClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. 
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -355,13 +538,15 @@ def test_oracle_database_client_client_options(client_class, transport_class, tr # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -373,7 +558,7 @@ def test_oracle_database_client_client_options(client_class, transport_class, tr # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -393,23 +578,33 @@ def test_oracle_database_client_client_options(client_class, transport_class, tr with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + 
host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -418,43 +613,63 @@ def test_oracle_database_client_client_options(client_class, transport_class, tr api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "true"), - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "false"), -]) -@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "true"), + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + OracleDatabaseClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_oracle_database_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_oracle_database_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -473,12 +688,22 @@ def test_oracle_database_client_mtls_env_auto(client_class, transport_class, tra # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -499,15 +724,22 @@ def test_oracle_database_client_mtls_env_auto(client_class, transport_class, tra ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -517,18 +749,24 @@ def test_oracle_database_client_mtls_env_auto(client_class, transport_class, tra ) -@pytest.mark.parametrize("client_class", [ - OracleDatabaseClient -]) -@mock.patch.object(OracleDatabaseClient, "DEFAULT_ENDPOINT", modify_default_endpoint(OracleDatabaseClient)) +@pytest.mark.parametrize("client_class", [OracleDatabaseClient]) +@mock.patch.object( + OracleDatabaseClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OracleDatabaseClient), +) def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -536,8 +774,12 @@ def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None @@ -555,16 +797,28 @@ def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -574,33 +828,55 @@ def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + -@pytest.mark.parametrize("client_class", [ - OracleDatabaseClient -]) -@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) +@pytest.mark.parametrize("client_class", [OracleDatabaseClient]) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) def test_oracle_database_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE - default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -623,11 +899,19 @@ def test_oracle_database_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the 
_DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -635,25 +919,34 @@ def test_oracle_database_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), -]) -def test_oracle_database_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) +def test_oracle_database_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -662,22 +955,28 @@ def test_oracle_database_client_client_options_scopes(client_class, transport_cl api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", None), -]) -def test_oracle_database_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", None), + ], +) +def test_oracle_database_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -687,10 +986,13 @@ def test_oracle_database_client_client_options_credentials_file(client_class, tr ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListCloudExadataInfrastructuresRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListCloudExadataInfrastructuresRequest, + dict, + ], +) def test_list_cloud_exadata_infrastructures_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -698,30 +1000,33 @@ def test_list_cloud_exadata_infrastructures_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListCloudExadataInfrastructuresResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(return_value) + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_cloud_exadata_infrastructures(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListCloudExadataInfrastructuresPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_cloud_exadata_infrastructures_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -737,12 +1042,19 @@ def test_list_cloud_exadata_infrastructures_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_cloud_exadata_infrastructures in client._transport._wrapped_methods + assert ( + client._transport.list_cloud_exadata_infrastructures + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_cloud_exadata_infrastructures] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_cloud_exadata_infrastructures + ] = mock_rpc request = {} client.list_cloud_exadata_infrastructures(request) @@ -757,57 +1069,67 @@ def test_list_cloud_exadata_infrastructures_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_cloud_exadata_infrastructures_rest_required_fields(request_type=oracledatabase.ListCloudExadataInfrastructuresRequest): +def test_list_cloud_exadata_infrastructures_rest_required_fields( + request_type=oracledatabase.ListCloudExadataInfrastructuresRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -815,42 +1137,65 @@ def test_list_cloud_exadata_infrastructures_rest_required_fields(request_type=or response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(return_value) + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_cloud_exadata_infrastructures(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_cloud_exadata_infrastructures_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.list_cloud_exadata_infrastructures._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + unset_fields = ( + transport.list_cloud_exadata_infrastructures._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_cloud_exadata_infrastructures_rest_interceptors(null_interceptor): transport = 
transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_cloud_exadata_infrastructures") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_cloud_exadata_infrastructures") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_cloud_exadata_infrastructures", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_list_cloud_exadata_infrastructures", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListCloudExadataInfrastructuresRequest.pb(oracledatabase.ListCloudExadataInfrastructuresRequest()) + pb_message = oracledatabase.ListCloudExadataInfrastructuresRequest.pb( + oracledatabase.ListCloudExadataInfrastructuresRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -861,34 +1206,49 @@ def test_list_cloud_exadata_infrastructures_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(oracledatabase.ListCloudExadataInfrastructuresResponse()) + req.return_value._content = ( + oracledatabase.ListCloudExadataInfrastructuresResponse.to_json( + oracledatabase.ListCloudExadataInfrastructuresResponse() + ) + ) 
request = oracledatabase.ListCloudExadataInfrastructuresRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() - client.list_cloud_exadata_infrastructures(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_cloud_exadata_infrastructures( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_cloud_exadata_infrastructures_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListCloudExadataInfrastructuresRequest): +def test_list_cloud_exadata_infrastructures_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListCloudExadataInfrastructuresRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -904,16 +1264,16 @@ def test_list_cloud_exadata_infrastructures_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -921,9 +1281,11 @@ def test_list_cloud_exadata_infrastructures_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(return_value) + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_cloud_exadata_infrastructures(**mock_args) @@ -932,10 +1294,16 @@ def test_list_cloud_exadata_infrastructures_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" + % client.transport._host, + args[1], + ) -def test_list_cloud_exadata_infrastructures_rest_flattened_error(transport: str = 'rest'): +def test_list_cloud_exadata_infrastructures_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -946,20 +1314,20 @@ def test_list_cloud_exadata_infrastructures_rest_flattened_error(transport: str with pytest.raises(ValueError): client.list_cloud_exadata_infrastructures( oracledatabase.ListCloudExadataInfrastructuresRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = 'rest'): +def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListCloudExadataInfrastructuresResponse( @@ -968,17 +1336,17 @@ def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = 'rest'): exadata_infra.CloudExadataInfrastructure(), exadata_infra.CloudExadataInfrastructure(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListCloudExadataInfrastructuresResponse( cloud_exadata_infrastructures=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListCloudExadataInfrastructuresResponse( cloud_exadata_infrastructures=[ exadata_infra.CloudExadataInfrastructure(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListCloudExadataInfrastructuresResponse( cloud_exadata_infrastructures=[ @@ -991,31 +1359,40 @@ def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_cloud_exadata_infrastructures(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, exadata_infra.CloudExadataInfrastructure) - for i in results) + assert all( + isinstance(i, 
exadata_infra.CloudExadataInfrastructure) for i in results + ) - pages = list(client.list_cloud_exadata_infrastructures(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.list_cloud_exadata_infrastructures(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.GetCloudExadataInfrastructureRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetCloudExadataInfrastructureRequest, + dict, + ], +) def test_get_cloud_exadata_infrastructure_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1023,17 +1400,19 @@ def test_get_cloud_exadata_infrastructure_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = exadata_infra.CloudExadataInfrastructure( - name='name_value', - display_name='display_name_value', - gcp_oracle_zone='gcp_oracle_zone_value', - entitlement_id='entitlement_id_value', + name="name_value", + display_name="display_name_value", + gcp_oracle_zone="gcp_oracle_zone_value", + entitlement_id="entitlement_id_value", ) # Wrap the value into a proper Response obj @@ -1043,16 +1422,17 @@ def test_get_cloud_exadata_infrastructure_rest(request_type): return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_cloud_exadata_infrastructure(request) # Establish that the response is the type that we expect. assert isinstance(response, exadata_infra.CloudExadataInfrastructure) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.gcp_oracle_zone == 'gcp_oracle_zone_value' - assert response.entitlement_id == 'entitlement_id_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.gcp_oracle_zone == "gcp_oracle_zone_value" + assert response.entitlement_id == "entitlement_id_value" + def test_get_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1068,12 +1448,19 @@ def test_get_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_cloud_exadata_infrastructure in client._transport._wrapped_methods + assert ( + client._transport.get_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # 
operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_cloud_exadata_infrastructure] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cloud_exadata_infrastructure + ] = mock_rpc request = {} client.get_cloud_exadata_infrastructure(request) @@ -1088,55 +1475,60 @@ def test_get_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_cloud_exadata_infrastructure_rest_required_fields(request_type=oracledatabase.GetCloudExadataInfrastructureRequest): +def test_get_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.GetCloudExadataInfrastructureRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = exadata_infra.CloudExadataInfrastructure() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1147,39 +1539,51 @@ def test_get_cloud_exadata_infrastructure_rest_required_fields(request_type=orac return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_cloud_exadata_infrastructure(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_cloud_exadata_infrastructure_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.get_cloud_exadata_infrastructure._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + unset_fields = ( + transport.get_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = 
OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_get_cloud_exadata_infrastructure") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_get_cloud_exadata_infrastructure") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_get_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_cloud_exadata_infrastructure" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.GetCloudExadataInfrastructureRequest.pb(oracledatabase.GetCloudExadataInfrastructureRequest()) + pb_message = oracledatabase.GetCloudExadataInfrastructureRequest.pb( + oracledatabase.GetCloudExadataInfrastructureRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1190,34 +1594,49 @@ def test_get_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = exadata_infra.CloudExadataInfrastructure.to_json(exadata_infra.CloudExadataInfrastructure()) + req.return_value._content = exadata_infra.CloudExadataInfrastructure.to_json( + exadata_infra.CloudExadataInfrastructure() + ) request = oracledatabase.GetCloudExadataInfrastructureRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = exadata_infra.CloudExadataInfrastructure() - client.get_cloud_exadata_infrastructure(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + 
client.get_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_cloud_exadata_infrastructure_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GetCloudExadataInfrastructureRequest): +def test_get_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.GetCloudExadataInfrastructureRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -1233,16 +1652,18 @@ def test_get_cloud_exadata_infrastructure_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = exadata_infra.CloudExadataInfrastructure() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1252,7 +1673,7 @@ def test_get_cloud_exadata_infrastructure_rest_flattened(): # Convert return value to protobuf type return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_cloud_exadata_infrastructure(**mock_args) @@ -1261,10 +1682,14 @@ def test_get_cloud_exadata_infrastructure_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" + % client.transport._host, + args[1], + ) -def test_get_cloud_exadata_infrastructure_rest_flattened_error(transport: str = 'rest'): +def test_get_cloud_exadata_infrastructure_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1275,21 +1700,23 @@ def test_get_cloud_exadata_infrastructure_rest_flattened_error(transport: str = with pytest.raises(ValueError): client.get_cloud_exadata_infrastructure( oracledatabase.GetCloudExadataInfrastructureRequest(), - name='name_value', + name="name_value", ) def test_get_cloud_exadata_infrastructure_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.CreateCloudExadataInfrastructureRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateCloudExadataInfrastructureRequest, + dict, + ], +) def test_create_cloud_exadata_infrastructure_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1297,14 +1724,62 @@ def test_create_cloud_exadata_infrastructure_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["cloud_exadata_infrastructure"] = {'name': 'name_value', 'display_name': 'display_name_value', 'gcp_oracle_zone': 'gcp_oracle_zone_value', 'entitlement_id': 'entitlement_id_value', 'properties': {'ocid': 'ocid_value', 'compute_count': 1413, 'storage_count': 
1405, 'total_storage_size_gb': 2234, 'available_storage_size_gb': 2615, 'maintenance_window': {'preference': 1, 'months': [1], 'weeks_of_month': [1497, 1498], 'days_of_week': [1], 'hours_of_day': [1283, 1284], 'lead_time_week': 1455, 'patching_mode': 1, 'custom_action_timeout_mins': 2804, 'is_custom_action_timeout_enabled': True}, 'state': 1, 'shape': 'shape_value', 'oci_url': 'oci_url_value', 'cpu_count': 976, 'max_cpu_count': 1397, 'memory_size_gb': 1499, 'max_memory_gb': 1382, 'db_node_storage_size_gb': 2401, 'max_db_node_storage_size_gb': 2822, 'data_storage_size_tb': 0.2109, 'max_data_storage_tb': 0.19920000000000002, 'activated_storage_count': 2449, 'additional_storage_count': 2549, 'db_server_version': 'db_server_version_value', 'storage_server_version': 'storage_server_version_value', 'next_maintenance_run_id': 'next_maintenance_run_id_value', 'next_maintenance_run_time': {'seconds': 751, 'nanos': 543}, 'next_security_maintenance_run_time': {}, 'customer_contacts': [{'email': 'email_value'}], 'monthly_storage_server_version': 'monthly_storage_server_version_value', 'monthly_db_server_version': 'monthly_db_server_version_value'}, 'labels': {}, 'create_time': {}} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cloud_exadata_infrastructure"] = { + "name": "name_value", + "display_name": "display_name_value", + "gcp_oracle_zone": "gcp_oracle_zone_value", + "entitlement_id": "entitlement_id_value", + "properties": { + "ocid": "ocid_value", + "compute_count": 1413, + "storage_count": 1405, + "total_storage_size_gb": 2234, + "available_storage_size_gb": 2615, + "maintenance_window": { + "preference": 1, + "months": [1], + "weeks_of_month": [1497, 1498], + "days_of_week": [1], + "hours_of_day": [1283, 1284], + "lead_time_week": 1455, + "patching_mode": 1, + "custom_action_timeout_mins": 2804, + "is_custom_action_timeout_enabled": True, + }, + "state": 1, + "shape": "shape_value", + "oci_url": "oci_url_value", + "cpu_count": 976, + 
"max_cpu_count": 1397, + "memory_size_gb": 1499, + "max_memory_gb": 1382, + "db_node_storage_size_gb": 2401, + "max_db_node_storage_size_gb": 2822, + "data_storage_size_tb": 0.2109, + "max_data_storage_tb": 0.19920000000000002, + "activated_storage_count": 2449, + "additional_storage_count": 2549, + "db_server_version": "db_server_version_value", + "storage_server_version": "storage_server_version_value", + "next_maintenance_run_id": "next_maintenance_run_id_value", + "next_maintenance_run_time": {"seconds": 751, "nanos": 543}, + "next_security_maintenance_run_time": {}, + "customer_contacts": [{"email": "email_value"}], + "monthly_storage_server_version": "monthly_storage_server_version_value", + "monthly_db_server_version": "monthly_db_server_version_value", + }, + "labels": {}, + "create_time": {}, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = oracledatabase.CreateCloudExadataInfrastructureRequest.meta.fields["cloud_exadata_infrastructure"] + test_field = oracledatabase.CreateCloudExadataInfrastructureRequest.meta.fields[ + "cloud_exadata_infrastructure" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -1318,7 +1793,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -1332,7 +1807,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this 
test code will not run if all subfields are present at runtime - for field, value in request_init["cloud_exadata_infrastructure"].items(): # pragma: NO COVER + for field, value in request_init[ + "cloud_exadata_infrastructure" + ].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -1347,40 +1824,47 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["cloud_exadata_infrastructure"][field])): + for i in range( + 0, len(request_init["cloud_exadata_infrastructure"][field]) + ): del request_init["cloud_exadata_infrastructure"][field][i][subfield] else: del request_init["cloud_exadata_infrastructure"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_cloud_exadata_infrastructure(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_create_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1395,12 +1879,19 @@ def test_create_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_cloud_exadata_infrastructure in client._transport._wrapped_methods + assert ( + client._transport.create_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_cloud_exadata_infrastructure] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_cloud_exadata_infrastructure + ] = mock_rpc request = {} client.create_cloud_exadata_infrastructure(request) @@ -1419,7 +1910,9 @@ def test_create_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_cloud_exadata_infrastructure_rest_required_fields(request_type=oracledatabase.CreateCloudExadataInfrastructureRequest): +def test_create_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.CreateCloudExadataInfrastructureRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} @@ -1427,65 +1920,81 @@ def test_create_cloud_exadata_infrastructure_rest_required_fields(request_type=o request_init["cloud_exadata_infrastructure_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "cloudExadataInfrastructureId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "cloudExadataInfrastructureId" in jsonified_request - assert jsonified_request["cloudExadataInfrastructureId"] == request_init["cloud_exadata_infrastructure_id"] + assert ( + jsonified_request["cloudExadataInfrastructureId"] + == request_init["cloud_exadata_infrastructure_id"] + ) - jsonified_request["parent"] = 
'parent_value' - jsonified_request["cloudExadataInfrastructureId"] = 'cloud_exadata_infrastructure_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "cloudExadataInfrastructureId" + ] = "cloud_exadata_infrastructure_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("cloud_exadata_infrastructure_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "cloud_exadata_infrastructure_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "cloudExadataInfrastructureId" in jsonified_request - assert jsonified_request["cloudExadataInfrastructureId"] == 'cloud_exadata_infrastructure_id_value' + assert ( + jsonified_request["cloudExadataInfrastructureId"] + == "cloud_exadata_infrastructure_id_value" + ) client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_cloud_exadata_infrastructure(request) @@ -1495,34 +2004,64 @@ def test_create_cloud_exadata_infrastructure_rest_required_fields(request_type=o "cloudExadataInfrastructureId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_cloud_exadata_infrastructure_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.create_cloud_exadata_infrastructure._get_unset_required_fields({}) - assert set(unset_fields) == 
(set(("cloudExadataInfrastructureId", "requestId", )) & set(("parent", "cloudExadataInfrastructureId", "cloudExadataInfrastructure", ))) + unset_fields = ( + transport.create_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "cloudExadataInfrastructureId", + "requestId", + ) + ) + & set( + ( + "parent", + "cloudExadataInfrastructureId", + "cloudExadataInfrastructure", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_create_cloud_exadata_infrastructure") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_create_cloud_exadata_infrastructure") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_create_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_create_cloud_exadata_infrastructure", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = 
oracledatabase.CreateCloudExadataInfrastructureRequest.pb(oracledatabase.CreateCloudExadataInfrastructureRequest()) + pb_message = oracledatabase.CreateCloudExadataInfrastructureRequest.pb( + oracledatabase.CreateCloudExadataInfrastructureRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1533,34 +2072,47 @@ def test_create_cloud_exadata_infrastructure_rest_interceptors(null_interceptor) req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.CreateCloudExadataInfrastructureRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_cloud_exadata_infrastructure(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_cloud_exadata_infrastructure_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.CreateCloudExadataInfrastructureRequest): +def test_create_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.CreateCloudExadataInfrastructureRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -1576,18 +2128,20 @@ def test_create_cloud_exadata_infrastructure_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure(name='name_value'), - cloud_exadata_infrastructure_id='cloud_exadata_infrastructure_id_value', + parent="parent_value", + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure( + name="name_value" + ), + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", ) mock_args.update(sample_request) @@ -1595,7 +2149,7 @@ def test_create_cloud_exadata_infrastructure_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_cloud_exadata_infrastructure(**mock_args) @@ -1604,10 +2158,16 @@ def test_create_cloud_exadata_infrastructure_rest_flattened(): # 
request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" + % client.transport._host, + args[1], + ) -def test_create_cloud_exadata_infrastructure_rest_flattened_error(transport: str = 'rest'): +def test_create_cloud_exadata_infrastructure_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1618,23 +2178,27 @@ def test_create_cloud_exadata_infrastructure_rest_flattened_error(transport: str with pytest.raises(ValueError): client.create_cloud_exadata_infrastructure( oracledatabase.CreateCloudExadataInfrastructureRequest(), - parent='parent_value', - cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure(name='name_value'), - cloud_exadata_infrastructure_id='cloud_exadata_infrastructure_id_value', + parent="parent_value", + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure( + name="name_value" + ), + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", ) def test_create_cloud_exadata_infrastructure_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.DeleteCloudExadataInfrastructureRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteCloudExadataInfrastructureRequest, + dict, + ], +) def test_delete_cloud_exadata_infrastructure_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1642,26 +2206,29 @@ def test_delete_cloud_exadata_infrastructure_rest(request_type): 
) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_cloud_exadata_infrastructure(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_delete_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1676,12 +2243,19 @@ def test_delete_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cloud_exadata_infrastructure in client._transport._wrapped_methods + assert ( + client._transport.delete_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.delete_cloud_exadata_infrastructure] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cloud_exadata_infrastructure + ] = mock_rpc request = {} client.delete_cloud_exadata_infrastructure(request) @@ -1700,57 +2274,67 @@ def test_delete_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_cloud_exadata_infrastructure_rest_required_fields(request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest): +def test_delete_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force", "request_id", )) + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1758,40 +2342,62 @@ def test_delete_cloud_exadata_infrastructure_rest_required_fields(request_type=o response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_cloud_exadata_infrastructure(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_cloud_exadata_infrastructure_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.delete_cloud_exadata_infrastructure._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force", "requestId", )) & set(("name", ))) + unset_fields = ( + transport.delete_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else 
transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_delete_cloud_exadata_infrastructure") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_exadata_infrastructure") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_delete_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_delete_cloud_exadata_infrastructure", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb(oracledatabase.DeleteCloudExadataInfrastructureRequest()) + pb_message = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb( + oracledatabase.DeleteCloudExadataInfrastructureRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1802,34 +2408,49 @@ def test_delete_cloud_exadata_infrastructure_rest_interceptors(null_interceptor) req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.DeleteCloudExadataInfrastructureRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() - client.delete_cloud_exadata_infrastructure(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_delete_cloud_exadata_infrastructure_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest): +def test_delete_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -1845,16 +2466,18 @@ def test_delete_cloud_exadata_infrastructure_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1862,7 +2485,7 @@ def test_delete_cloud_exadata_infrastructure_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_cloud_exadata_infrastructure(**mock_args) @@ -1871,10 +2494,16 @@ def test_delete_cloud_exadata_infrastructure_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" + % client.transport._host, + args[1], + ) -def test_delete_cloud_exadata_infrastructure_rest_flattened_error(transport: str = 'rest'): +def test_delete_cloud_exadata_infrastructure_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1885,21 +2514,23 @@ def test_delete_cloud_exadata_infrastructure_rest_flattened_error(transport: str with pytest.raises(ValueError): client.delete_cloud_exadata_infrastructure( oracledatabase.DeleteCloudExadataInfrastructureRequest(), - name='name_value', + name="name_value", ) def test_delete_cloud_exadata_infrastructure_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListCloudVmClustersRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListCloudVmClustersRequest, + dict, + ], +) def test_list_cloud_vm_clusters_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1907,14 +2538,14 @@ def test_list_cloud_vm_clusters_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListCloudVmClustersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -1924,13 +2555,14 @@ def test_list_cloud_vm_clusters_rest(request_type): return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_cloud_vm_clusters(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListCloudVmClustersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_cloud_vm_clusters_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1946,12 +2578,19 @@ def test_list_cloud_vm_clusters_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_cloud_vm_clusters in client._transport._wrapped_methods + assert ( + client._transport.list_cloud_vm_clusters + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_cloud_vm_clusters] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_cloud_vm_clusters + ] = mock_rpc request = {} client.list_cloud_vm_clusters(request) @@ -1966,57 +2605,68 @@ def test_list_cloud_vm_clusters_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_cloud_vm_clusters_rest_required_fields(request_type=oracledatabase.ListCloudVmClustersRequest): +def test_list_cloud_vm_clusters_rest_required_fields( + request_type=oracledatabase.ListCloudVmClustersRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListCloudVmClustersResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -2027,39 +2677,57 @@ def test_list_cloud_vm_clusters_rest_required_fields(request_type=oracledatabase return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_cloud_vm_clusters(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_cloud_vm_clusters_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_cloud_vm_clusters._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_cloud_vm_clusters_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) 
- with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_cloud_vm_clusters") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_cloud_vm_clusters") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_cloud_vm_clusters" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_cloud_vm_clusters" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListCloudVmClustersRequest.pb(oracledatabase.ListCloudVmClustersRequest()) + pb_message = oracledatabase.ListCloudVmClustersRequest.pb( + oracledatabase.ListCloudVmClustersRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2070,34 +2738,46 @@ def test_list_cloud_vm_clusters_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListCloudVmClustersResponse.to_json(oracledatabase.ListCloudVmClustersResponse()) + req.return_value._content = oracledatabase.ListCloudVmClustersResponse.to_json( + oracledatabase.ListCloudVmClustersResponse() + ) request = oracledatabase.ListCloudVmClustersRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListCloudVmClustersResponse() - client.list_cloud_vm_clusters(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_cloud_vm_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def 
test_list_cloud_vm_clusters_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListCloudVmClustersRequest): +def test_list_cloud_vm_clusters_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListCloudVmClustersRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -2113,16 +2793,16 @@ def test_list_cloud_vm_clusters_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListCloudVmClustersResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -2132,7 +2812,7 @@ def test_list_cloud_vm_clusters_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_cloud_vm_clusters(**mock_args) @@ -2141,10 +2821,14 @@ def test_list_cloud_vm_clusters_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" + % client.transport._host, + args[1], + ) -def test_list_cloud_vm_clusters_rest_flattened_error(transport: str = 'rest'): +def test_list_cloud_vm_clusters_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2155,20 +2839,20 @@ def test_list_cloud_vm_clusters_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_cloud_vm_clusters( oracledatabase.ListCloudVmClustersRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_cloud_vm_clusters_rest_pager(transport: str = 'rest'): +def test_list_cloud_vm_clusters_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListCloudVmClustersResponse( @@ -2177,17 +2861,17 @@ def test_list_cloud_vm_clusters_rest_pager(transport: str = 'rest'): vm_cluster.CloudVmCluster(), vm_cluster.CloudVmCluster(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListCloudVmClustersResponse( cloud_vm_clusters=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListCloudVmClustersResponse( cloud_vm_clusters=[ vm_cluster.CloudVmCluster(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListCloudVmClustersResponse( cloud_vm_clusters=[ @@ -2200,31 +2884,35 @@ def test_list_cloud_vm_clusters_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListCloudVmClustersResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListCloudVmClustersResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_cloud_vm_clusters(request=sample_request) results = list(pager) assert len(results) == 6 - assert 
all(isinstance(i, vm_cluster.CloudVmCluster) - for i in results) + assert all(isinstance(i, vm_cluster.CloudVmCluster) for i in results) pages = list(client.list_cloud_vm_clusters(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.GetCloudVmClusterRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetCloudVmClusterRequest, + dict, + ], +) def test_get_cloud_vm_cluster_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2232,20 +2920,22 @@ def test_get_cloud_vm_cluster_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = vm_cluster.CloudVmCluster( - name='name_value', - exadata_infrastructure='exadata_infrastructure_value', - display_name='display_name_value', - gcp_oracle_zone='gcp_oracle_zone_value', - cidr='cidr_value', - backup_subnet_cidr='backup_subnet_cidr_value', - network='network_value', + name="name_value", + exadata_infrastructure="exadata_infrastructure_value", + display_name="display_name_value", + gcp_oracle_zone="gcp_oracle_zone_value", + cidr="cidr_value", + backup_subnet_cidr="backup_subnet_cidr_value", + network="network_value", ) # Wrap the value into a proper Response obj @@ -2255,19 +2945,20 @@ def test_get_cloud_vm_cluster_rest(request_type): return_value = vm_cluster.CloudVmCluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_cloud_vm_cluster(request) # Establish that the response is the type that we expect. 
assert isinstance(response, vm_cluster.CloudVmCluster) - assert response.name == 'name_value' - assert response.exadata_infrastructure == 'exadata_infrastructure_value' - assert response.display_name == 'display_name_value' - assert response.gcp_oracle_zone == 'gcp_oracle_zone_value' - assert response.cidr == 'cidr_value' - assert response.backup_subnet_cidr == 'backup_subnet_cidr_value' - assert response.network == 'network_value' + assert response.name == "name_value" + assert response.exadata_infrastructure == "exadata_infrastructure_value" + assert response.display_name == "display_name_value" + assert response.gcp_oracle_zone == "gcp_oracle_zone_value" + assert response.cidr == "cidr_value" + assert response.backup_subnet_cidr == "backup_subnet_cidr_value" + assert response.network == "network_value" + def test_get_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2283,12 +2974,18 @@ def test_get_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_cloud_vm_cluster in client._transport._wrapped_methods + assert ( + client._transport.get_cloud_vm_cluster in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_cloud_vm_cluster] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_cloud_vm_cluster + ] = mock_rpc request = {} client.get_cloud_vm_cluster(request) @@ -2303,55 +3000,60 @@ def test_get_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.GetCloudVmClusterRequest): +def test_get_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.GetCloudVmClusterRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = 
OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = vm_cluster.CloudVmCluster() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -2362,39 +3064,48 @@ def test_get_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.G return_value = vm_cluster.CloudVmCluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_cloud_vm_cluster(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_cloud_vm_cluster_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = 
transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_cloud_vm_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_cloud_vm_cluster_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_get_cloud_vm_cluster") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_get_cloud_vm_cluster") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_get_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_cloud_vm_cluster" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.GetCloudVmClusterRequest.pb(oracledatabase.GetCloudVmClusterRequest()) + pb_message = oracledatabase.GetCloudVmClusterRequest.pb( + oracledatabase.GetCloudVmClusterRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2405,34 +3116,48 @@ def test_get_cloud_vm_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - 
req.return_value._content = vm_cluster.CloudVmCluster.to_json(vm_cluster.CloudVmCluster()) + req.return_value._content = vm_cluster.CloudVmCluster.to_json( + vm_cluster.CloudVmCluster() + ) request = oracledatabase.GetCloudVmClusterRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = vm_cluster.CloudVmCluster() - client.get_cloud_vm_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_cloud_vm_cluster_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GetCloudVmClusterRequest): +def test_get_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.GetCloudVmClusterRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -2448,16 +3173,18 @@ def test_get_cloud_vm_cluster_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = vm_cluster.CloudVmCluster() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -2467,7 +3194,7 @@ def test_get_cloud_vm_cluster_rest_flattened(): # Convert return value to protobuf type return_value = vm_cluster.CloudVmCluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_cloud_vm_cluster(**mock_args) @@ -2476,10 +3203,14 @@ def test_get_cloud_vm_cluster_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" + % client.transport._host, + args[1], + ) -def test_get_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): +def test_get_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2490,21 +3221,23 @@ def test_get_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_cloud_vm_cluster( oracledatabase.GetCloudVmClusterRequest(), - name='name_value', + name="name_value", ) def test_get_cloud_vm_cluster_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.CreateCloudVmClusterRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateCloudVmClusterRequest, + dict, + ], +) def test_create_cloud_vm_cluster_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2512,14 +3245,64 @@ def test_create_cloud_vm_cluster_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["cloud_vm_cluster"] = {'name': 'name_value', 'exadata_infrastructure': 'exadata_infrastructure_value', 'display_name': 'display_name_value', 'gcp_oracle_zone': 'gcp_oracle_zone_value', 'properties': {'ocid': 'ocid_value', 'license_type': 1, 'gi_version': 'gi_version_value', 'time_zone': {'id': 'id_value', 'version': 'version_value'}, 'ssh_public_keys': ['ssh_public_keys_value1', 
'ssh_public_keys_value2'], 'node_count': 1070, 'shape': 'shape_value', 'ocpu_count': 0.1087, 'memory_size_gb': 1499, 'db_node_storage_size_gb': 2401, 'storage_size_gb': 1591, 'data_storage_size_tb': 0.2109, 'disk_redundancy': 1, 'sparse_diskgroup_enabled': True, 'local_backup_enabled': True, 'hostname_prefix': 'hostname_prefix_value', 'diagnostics_data_collection_options': {'diagnostics_events_enabled': True, 'health_monitoring_enabled': True, 'incident_logs_enabled': True}, 'state': 1, 'scan_listener_port_tcp': 2356, 'scan_listener_port_tcp_ssl': 2789, 'domain': 'domain_value', 'scan_dns': 'scan_dns_value', 'hostname': 'hostname_value', 'cpu_core_count': 1496, 'system_version': 'system_version_value', 'scan_ip_ids': ['scan_ip_ids_value1', 'scan_ip_ids_value2'], 'scan_dns_record_id': 'scan_dns_record_id_value', 'oci_url': 'oci_url_value', 'db_server_ocids': ['db_server_ocids_value1', 'db_server_ocids_value2'], 'compartment_id': 'compartment_id_value', 'dns_listener_ip': 'dns_listener_ip_value', 'cluster_name': 'cluster_name_value'}, 'labels': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'cidr': 'cidr_value', 'backup_subnet_cidr': 'backup_subnet_cidr_value', 'network': 'network_value'} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cloud_vm_cluster"] = { + "name": "name_value", + "exadata_infrastructure": "exadata_infrastructure_value", + "display_name": "display_name_value", + "gcp_oracle_zone": "gcp_oracle_zone_value", + "properties": { + "ocid": "ocid_value", + "license_type": 1, + "gi_version": "gi_version_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "ssh_public_keys": ["ssh_public_keys_value1", "ssh_public_keys_value2"], + "node_count": 1070, + "shape": "shape_value", + "ocpu_count": 0.1087, + "memory_size_gb": 1499, + "db_node_storage_size_gb": 2401, + "storage_size_gb": 1591, + "data_storage_size_tb": 0.2109, + "disk_redundancy": 1, + "sparse_diskgroup_enabled": True, + 
"local_backup_enabled": True, + "hostname_prefix": "hostname_prefix_value", + "diagnostics_data_collection_options": { + "diagnostics_events_enabled": True, + "health_monitoring_enabled": True, + "incident_logs_enabled": True, + }, + "state": 1, + "scan_listener_port_tcp": 2356, + "scan_listener_port_tcp_ssl": 2789, + "domain": "domain_value", + "scan_dns": "scan_dns_value", + "hostname": "hostname_value", + "cpu_core_count": 1496, + "system_version": "system_version_value", + "scan_ip_ids": ["scan_ip_ids_value1", "scan_ip_ids_value2"], + "scan_dns_record_id": "scan_dns_record_id_value", + "oci_url": "oci_url_value", + "db_server_ocids": ["db_server_ocids_value1", "db_server_ocids_value2"], + "compartment_id": "compartment_id_value", + "dns_listener_ip": "dns_listener_ip_value", + "cluster_name": "cluster_name_value", + }, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "cidr": "cidr_value", + "backup_subnet_cidr": "backup_subnet_cidr_value", + "network": "network_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = oracledatabase.CreateCloudVmClusterRequest.meta.fields["cloud_vm_cluster"] + test_field = oracledatabase.CreateCloudVmClusterRequest.meta.fields[ + "cloud_vm_cluster" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -2533,7 +3316,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2547,7 +3330,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cloud_vm_cluster"].items(): # pragma: NO COVER + for field, value in request_init["cloud_vm_cluster"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2562,12 +3345,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = 
subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2580,22 +3367,23 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_cloud_vm_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_create_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2610,12 +3398,19 @@ def test_create_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_cloud_vm_cluster in client._transport._wrapped_methods + assert ( + client._transport.create_cloud_vm_cluster + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.create_cloud_vm_cluster] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cloud_vm_cluster + ] = mock_rpc request = {} client.create_cloud_vm_cluster(request) @@ -2634,7 +3429,9 @@ def test_create_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.CreateCloudVmClusterRequest): +def test_create_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.CreateCloudVmClusterRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} @@ -2642,65 +3439,73 @@ def test_create_cloud_vm_cluster_rest_required_fields(request_type=oracledatabas request_init["cloud_vm_cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "cloudVmClusterId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "cloudVmClusterId" in jsonified_request assert jsonified_request["cloudVmClusterId"] == request_init["cloud_vm_cluster_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["cloudVmClusterId"] = 'cloud_vm_cluster_id_value' + 
jsonified_request["parent"] = "parent_value" + jsonified_request["cloudVmClusterId"] = "cloud_vm_cluster_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("cloud_vm_cluster_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "cloud_vm_cluster_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "cloudVmClusterId" in jsonified_request - assert jsonified_request["cloudVmClusterId"] == 'cloud_vm_cluster_id_value' + assert jsonified_request["cloudVmClusterId"] == "cloud_vm_cluster_id_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_cloud_vm_cluster(request) @@ -2710,34 +3515,60 @@ def test_create_cloud_vm_cluster_rest_required_fields(request_type=oracledatabas "cloudVmClusterId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_cloud_vm_cluster_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_cloud_vm_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("cloudVmClusterId", "requestId", )) & set(("parent", "cloudVmClusterId", "cloudVmCluster", ))) + assert set(unset_fields) == ( + set( + ( + "cloudVmClusterId", + "requestId", + ) + ) + & set( + ( + "parent", + "cloudVmClusterId", + "cloudVmCluster", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def 
test_create_cloud_vm_cluster_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_create_cloud_vm_cluster") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_create_cloud_vm_cluster") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_create_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_create_cloud_vm_cluster" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.CreateCloudVmClusterRequest.pb(oracledatabase.CreateCloudVmClusterRequest()) + pb_message = oracledatabase.CreateCloudVmClusterRequest.pb( + oracledatabase.CreateCloudVmClusterRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2748,34 +3579,46 @@ def test_create_cloud_vm_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = 
oracledatabase.CreateCloudVmClusterRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_cloud_vm_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_cloud_vm_cluster_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.CreateCloudVmClusterRequest): +def test_create_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.CreateCloudVmClusterRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -2791,18 +3634,18 @@ def test_create_cloud_vm_cluster_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - cloud_vm_cluster=vm_cluster.CloudVmCluster(name='name_value'), - cloud_vm_cluster_id='cloud_vm_cluster_id_value', + parent="parent_value", + cloud_vm_cluster=vm_cluster.CloudVmCluster(name="name_value"), + cloud_vm_cluster_id="cloud_vm_cluster_id_value", ) mock_args.update(sample_request) @@ -2810,7 +3653,7 @@ def test_create_cloud_vm_cluster_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_cloud_vm_cluster(**mock_args) @@ -2819,10 +3662,14 @@ def test_create_cloud_vm_cluster_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" + % client.transport._host, + args[1], + ) -def test_create_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): +def test_create_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2833,23 +3680,25 @@ def test_create_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_cloud_vm_cluster( oracledatabase.CreateCloudVmClusterRequest(), - parent='parent_value', - cloud_vm_cluster=vm_cluster.CloudVmCluster(name='name_value'), - cloud_vm_cluster_id='cloud_vm_cluster_id_value', + parent="parent_value", + cloud_vm_cluster=vm_cluster.CloudVmCluster(name="name_value"), + cloud_vm_cluster_id="cloud_vm_cluster_id_value", ) def test_create_cloud_vm_cluster_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.DeleteCloudVmClusterRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteCloudVmClusterRequest, + dict, + ], +) def test_delete_cloud_vm_cluster_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2857,26 +3706,29 @@ def test_delete_cloud_vm_cluster_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) 
# Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_cloud_vm_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_delete_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2891,12 +3743,19 @@ def test_delete_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cloud_vm_cluster in client._transport._wrapped_methods + assert ( + client._transport.delete_cloud_vm_cluster + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_cloud_vm_cluster] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_cloud_vm_cluster + ] = mock_rpc request = {} client.delete_cloud_vm_cluster(request) @@ -2915,57 +3774,67 @@ def test_delete_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.DeleteCloudVmClusterRequest): +def test_delete_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.DeleteCloudVmClusterRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("force", "request_id", )) + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -2973,40 +3842,58 @@ def test_delete_cloud_vm_cluster_rest_required_fields(request_type=oracledatabas response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_cloud_vm_cluster(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_cloud_vm_cluster_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_cloud_vm_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force", "requestId", )) & set(("name", ))) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_cloud_vm_cluster_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), 
"request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_delete_cloud_vm_cluster") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_vm_cluster") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_delete_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_vm_cluster" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.DeleteCloudVmClusterRequest.pb(oracledatabase.DeleteCloudVmClusterRequest()) + pb_message = oracledatabase.DeleteCloudVmClusterRequest.pb( + oracledatabase.DeleteCloudVmClusterRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3017,34 +3904,48 @@ def test_delete_cloud_vm_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.DeleteCloudVmClusterRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_cloud_vm_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def 
test_delete_cloud_vm_cluster_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.DeleteCloudVmClusterRequest): +def test_delete_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.DeleteCloudVmClusterRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -3060,16 +3961,18 @@ def test_delete_cloud_vm_cluster_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -3077,7 +3980,7 @@ def test_delete_cloud_vm_cluster_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_cloud_vm_cluster(**mock_args) @@ -3086,10 +3989,14 @@ def test_delete_cloud_vm_cluster_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" + % client.transport._host, + args[1], + ) -def test_delete_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): +def test_delete_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3100,21 +4007,23 @@ def test_delete_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_cloud_vm_cluster( oracledatabase.DeleteCloudVmClusterRequest(), - name='name_value', + name="name_value", ) def test_delete_cloud_vm_cluster_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListEntitlementsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListEntitlementsRequest, + dict, + ], +) def test_list_entitlements_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3122,14 +4031,14 @@ def test_list_entitlements_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListEntitlementsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -3139,13 +4048,14 @@ def test_list_entitlements_rest(request_type): return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_entitlements(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListEntitlementsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_entitlements_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3165,8 +4075,12 @@ def test_list_entitlements_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entitlements] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_entitlements + ] = mock_rpc request = {} client.list_entitlements(request) @@ -3181,57 +4095,67 @@ def test_list_entitlements_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_entitlements_rest_required_fields(request_type=oracledatabase.ListEntitlementsRequest): +def test_list_entitlements_rest_required_fields( + request_type=oracledatabase.ListEntitlementsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entitlements._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entitlements._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entitlements._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entitlements._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListEntitlementsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -3242,39 +4166,56 @@ def test_list_entitlements_rest_required_fields(request_type=oracledatabase.List return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_entitlements(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_entitlements_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_entitlements._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_entitlements_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_entitlements") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_entitlements") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_entitlements" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_entitlements" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListEntitlementsRequest.pb(oracledatabase.ListEntitlementsRequest()) + pb_message = oracledatabase.ListEntitlementsRequest.pb( + oracledatabase.ListEntitlementsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3285,34 +4226,46 @@ def test_list_entitlements_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListEntitlementsResponse.to_json(oracledatabase.ListEntitlementsResponse()) + req.return_value._content = oracledatabase.ListEntitlementsResponse.to_json( + oracledatabase.ListEntitlementsResponse() + ) request = oracledatabase.ListEntitlementsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListEntitlementsResponse() - client.list_entitlements(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_entitlements( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_entitlements_rest_bad_request(transport: str = 'rest', 
request_type=oracledatabase.ListEntitlementsRequest): +def test_list_entitlements_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListEntitlementsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -3328,16 +4281,16 @@ def test_list_entitlements_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListEntitlementsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -3347,7 +4300,7 @@ def test_list_entitlements_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_entitlements(**mock_args) @@ -3356,10 +4309,14 @@ def test_list_entitlements_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entitlements" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/entitlements" + % client.transport._host, + args[1], + ) -def test_list_entitlements_rest_flattened_error(transport: str = 'rest'): +def test_list_entitlements_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3370,20 +4327,20 @@ def test_list_entitlements_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_entitlements( oracledatabase.ListEntitlementsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_entitlements_rest_pager(transport: str = 'rest'): +def test_list_entitlements_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http 
request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListEntitlementsResponse( @@ -3392,17 +4349,17 @@ def test_list_entitlements_rest_pager(transport: str = 'rest'): entitlement.Entitlement(), entitlement.Entitlement(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListEntitlementsResponse( entitlements=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListEntitlementsResponse( entitlements=[ entitlement.Entitlement(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListEntitlementsResponse( entitlements=[ @@ -3415,31 +4372,35 @@ def test_list_entitlements_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListEntitlementsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListEntitlementsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_entitlements(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, entitlement.Entitlement) - for i in results) + assert all(isinstance(i, entitlement.Entitlement) for i in results) pages = 
list(client.list_entitlements(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListDbServersRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbServersRequest, + dict, + ], +) def test_list_db_servers_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3447,14 +4408,16 @@ def test_list_db_servers_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbServersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -3464,13 +4427,14 @@ def test_list_db_servers_rest(request_type): return_value = oracledatabase.ListDbServersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_servers(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDbServersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_db_servers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3490,7 +4454,9 @@ def test_list_db_servers_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_db_servers] = mock_rpc request = {} @@ -3506,57 +4472,67 @@ def test_list_db_servers_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_db_servers_rest_required_fields(request_type=oracledatabase.ListDbServersRequest): +def test_list_db_servers_rest_required_fields( + request_type=oracledatabase.ListDbServersRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_servers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_servers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + 
jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_servers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_servers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbServersResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -3567,39 +4543,56 @@ def test_list_db_servers_rest_required_fields(request_type=oracledatabase.ListDb return_value = oracledatabase.ListDbServersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_servers(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_db_servers_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_db_servers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_db_servers_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_db_servers") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_db_servers") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_servers" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_servers" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListDbServersRequest.pb(oracledatabase.ListDbServersRequest()) + pb_message = oracledatabase.ListDbServersRequest.pb( + oracledatabase.ListDbServersRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3610,34 +4603,48 @@ def test_list_db_servers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListDbServersResponse.to_json(oracledatabase.ListDbServersResponse()) + req.return_value._content = oracledatabase.ListDbServersResponse.to_json( + oracledatabase.ListDbServersResponse() + ) request = oracledatabase.ListDbServersRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListDbServersResponse() - client.list_db_servers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_db_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_db_servers_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListDbServersRequest): +def 
test_list_db_servers_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbServersRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -3653,16 +4660,18 @@ def test_list_db_servers_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListDbServersResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -3672,7 +4681,7 @@ def test_list_db_servers_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListDbServersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_db_servers(**mock_args) @@ -3681,10 +4690,14 @@ def test_list_db_servers_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers" + % client.transport._host, + args[1], + ) -def test_list_db_servers_rest_flattened_error(transport: str = 'rest'): +def test_list_db_servers_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3695,20 +4708,20 @@ def test_list_db_servers_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_db_servers( oracledatabase.ListDbServersRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_db_servers_rest_pager(transport: str = 'rest'): +def test_list_db_servers_rest_pager(transport: str = "rest"): client = 
OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListDbServersResponse( @@ -3717,17 +4730,17 @@ def test_list_db_servers_rest_pager(transport: str = 'rest'): db_server.DbServer(), db_server.DbServer(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListDbServersResponse( db_servers=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListDbServersResponse( db_servers=[ db_server.DbServer(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListDbServersResponse( db_servers=[ @@ -3740,31 +4753,37 @@ def test_list_db_servers_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListDbServersResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListDbServersResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } pager = client.list_db_servers(request=sample_request) results = list(pager) assert len(results) == 6 - assert 
all(isinstance(i, db_server.DbServer) - for i in results) + assert all(isinstance(i, db_server.DbServer) for i in results) pages = list(client.list_db_servers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListDbNodesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbNodesRequest, + dict, + ], +) def test_list_db_nodes_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3772,14 +4791,16 @@ def test_list_db_nodes_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbNodesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -3789,13 +4810,14 @@ def test_list_db_nodes_rest(request_type): return_value = oracledatabase.ListDbNodesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_nodes(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDbNodesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_db_nodes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3815,7 +4837,9 @@ def test_list_db_nodes_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_db_nodes] = mock_rpc request = {} @@ -3831,57 +4855,67 @@ def test_list_db_nodes_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_db_nodes_rest_required_fields(request_type=oracledatabase.ListDbNodesRequest): +def test_list_db_nodes_rest_required_fields( + request_type=oracledatabase.ListDbNodesRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_nodes._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_nodes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_nodes._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_nodes._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbNodesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -3892,39 +4926,56 @@ def test_list_db_nodes_rest_required_fields(request_type=oracledatabase.ListDbNo return_value = oracledatabase.ListDbNodesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_nodes(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_db_nodes_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_db_nodes._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_db_nodes_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), 
"request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_db_nodes") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_db_nodes") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_nodes" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_nodes" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListDbNodesRequest.pb(oracledatabase.ListDbNodesRequest()) + pb_message = oracledatabase.ListDbNodesRequest.pb( + oracledatabase.ListDbNodesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3935,34 +4986,48 @@ def test_list_db_nodes_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListDbNodesResponse.to_json(oracledatabase.ListDbNodesResponse()) + req.return_value._content = oracledatabase.ListDbNodesResponse.to_json( + oracledatabase.ListDbNodesResponse() + ) request = oracledatabase.ListDbNodesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListDbNodesResponse() - client.list_db_nodes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_db_nodes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_db_nodes_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListDbNodesRequest): +def test_list_db_nodes_rest_bad_request( + transport: str = "rest", 
request_type=oracledatabase.ListDbNodesRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -3978,16 +5043,18 @@ def test_list_db_nodes_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListDbNodesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -3997,7 +5064,7 @@ def test_list_db_nodes_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListDbNodesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_db_nodes(**mock_args) @@ -4006,10 +5073,14 @@ def test_list_db_nodes_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes" + % client.transport._host, + args[1], + ) -def test_list_db_nodes_rest_flattened_error(transport: str = 'rest'): +def test_list_db_nodes_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4020,20 +5091,20 @@ def test_list_db_nodes_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_db_nodes( oracledatabase.ListDbNodesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_db_nodes_rest_pager(transport: str = 'rest'): +def test_list_db_nodes_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), 
transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListDbNodesResponse( @@ -4042,17 +5113,17 @@ def test_list_db_nodes_rest_pager(transport: str = 'rest'): db_node.DbNode(), db_node.DbNode(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListDbNodesResponse( db_nodes=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListDbNodesResponse( db_nodes=[ db_node.DbNode(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListDbNodesResponse( db_nodes=[ @@ -4065,31 +5136,37 @@ def test_list_db_nodes_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListDbNodesResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListDbNodesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } pager = client.list_db_nodes(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, db_node.DbNode) - for i in results) + assert all(isinstance(i, db_node.DbNode) for i in results) pages = 
list(client.list_db_nodes(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListGiVersionsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListGiVersionsRequest, + dict, + ], +) def test_list_gi_versions_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4097,14 +5174,14 @@ def test_list_gi_versions_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListGiVersionsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -4114,13 +5191,14 @@ def test_list_gi_versions_rest(request_type): return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_gi_versions(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListGiVersionsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_gi_versions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4140,8 +5218,12 @@ def test_list_gi_versions_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_gi_versions] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_gi_versions + ] = mock_rpc request = {} client.list_gi_versions(request) @@ -4156,57 +5238,67 @@ def test_list_gi_versions_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_gi_versions_rest_required_fields(request_type=oracledatabase.ListGiVersionsRequest): +def test_list_gi_versions_rest_required_fields( + request_type=oracledatabase.ListGiVersionsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_gi_versions._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gi_versions._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_gi_versions._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gi_versions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListGiVersionsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -4217,39 +5309,56 @@ def test_list_gi_versions_rest_required_fields(request_type=oracledatabase.ListG return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_gi_versions(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_gi_versions_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_gi_versions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_gi_versions_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_gi_versions") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_gi_versions") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_gi_versions" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_gi_versions" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListGiVersionsRequest.pb(oracledatabase.ListGiVersionsRequest()) + pb_message = oracledatabase.ListGiVersionsRequest.pb( + oracledatabase.ListGiVersionsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4260,34 +5369,46 @@ def test_list_gi_versions_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListGiVersionsResponse.to_json(oracledatabase.ListGiVersionsResponse()) + req.return_value._content = oracledatabase.ListGiVersionsResponse.to_json( + oracledatabase.ListGiVersionsResponse() + ) request = oracledatabase.ListGiVersionsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListGiVersionsResponse() - client.list_gi_versions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_gi_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_gi_versions_rest_bad_request(transport: str = 'rest', 
request_type=oracledatabase.ListGiVersionsRequest): +def test_list_gi_versions_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListGiVersionsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -4303,16 +5424,16 @@ def test_list_gi_versions_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListGiVersionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -4322,7 +5443,7 @@ def test_list_gi_versions_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_gi_versions(**mock_args) @@ -4331,10 +5452,13 @@ def test_list_gi_versions_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/giVersions" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/giVersions" % client.transport._host, + args[1], + ) -def test_list_gi_versions_rest_flattened_error(transport: str = 'rest'): +def test_list_gi_versions_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4345,20 +5469,20 @@ def test_list_gi_versions_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_gi_versions( oracledatabase.ListGiVersionsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_gi_versions_rest_pager(transport: str = 'rest'): +def test_list_gi_versions_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the 
method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListGiVersionsResponse( @@ -4367,17 +5491,17 @@ def test_list_gi_versions_rest_pager(transport: str = 'rest'): gi_version.GiVersion(), gi_version.GiVersion(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListGiVersionsResponse( gi_versions=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListGiVersionsResponse( gi_versions=[ gi_version.GiVersion(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListGiVersionsResponse( gi_versions=[ @@ -4390,31 +5514,35 @@ def test_list_gi_versions_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListGiVersionsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListGiVersionsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_gi_versions(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, gi_version.GiVersion) - for i in results) + assert all(isinstance(i, gi_version.GiVersion) for i in results) pages = list(client.list_gi_versions(request=sample_request).pages) - for page_, 
token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListDbSystemShapesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbSystemShapesRequest, + dict, + ], +) def test_list_db_system_shapes_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4422,14 +5550,14 @@ def test_list_db_system_shapes_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbSystemShapesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -4439,13 +5567,14 @@ def test_list_db_system_shapes_rest(request_type): return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_system_shapes(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDbSystemShapesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_db_system_shapes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4461,12 +5590,19 @@ def test_list_db_system_shapes_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_db_system_shapes in client._transport._wrapped_methods + assert ( + client._transport.list_db_system_shapes + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_db_system_shapes] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_db_system_shapes + ] = mock_rpc request = {} client.list_db_system_shapes(request) @@ -4481,57 +5617,67 @@ def test_list_db_system_shapes_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_db_system_shapes_rest_required_fields(request_type=oracledatabase.ListDbSystemShapesRequest): +def test_list_db_system_shapes_rest_required_fields( + request_type=oracledatabase.ListDbSystemShapesRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_system_shapes._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_system_shapes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_system_shapes._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_system_shapes._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbSystemShapesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -4542,39 +5688,56 @@ def test_list_db_system_shapes_rest_required_fields(request_type=oracledatabase. 
return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_system_shapes(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_db_system_shapes_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_db_system_shapes._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_db_system_shapes_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_db_system_shapes") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_db_system_shapes") as pre: + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_system_shapes" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_system_shapes" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListDbSystemShapesRequest.pb(oracledatabase.ListDbSystemShapesRequest()) + pb_message = oracledatabase.ListDbSystemShapesRequest.pb( + oracledatabase.ListDbSystemShapesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4585,34 +5748,46 @@ def test_list_db_system_shapes_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListDbSystemShapesResponse.to_json(oracledatabase.ListDbSystemShapesResponse()) + req.return_value._content = oracledatabase.ListDbSystemShapesResponse.to_json( + oracledatabase.ListDbSystemShapesResponse() + ) request = oracledatabase.ListDbSystemShapesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListDbSystemShapesResponse() - client.list_db_system_shapes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_db_system_shapes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_db_system_shapes_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListDbSystemShapesRequest): +def test_list_db_system_shapes_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbSystemShapesRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy 
transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -4628,16 +5803,16 @@ def test_list_db_system_shapes_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbSystemShapesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -4647,7 +5822,7 @@ def test_list_db_system_shapes_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_db_system_shapes(**mock_args) @@ -4656,10 +5831,14 @@ def test_list_db_system_shapes_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dbSystemShapes" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dbSystemShapes" + % client.transport._host, + args[1], + ) -def test_list_db_system_shapes_rest_flattened_error(transport: str = 'rest'): +def test_list_db_system_shapes_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4670,20 +5849,20 @@ def test_list_db_system_shapes_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_db_system_shapes( oracledatabase.ListDbSystemShapesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_db_system_shapes_rest_pager(transport: str = 'rest'): +def test_list_db_system_shapes_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListDbSystemShapesResponse( @@ -4692,17 +5871,17 @@ def test_list_db_system_shapes_rest_pager(transport: str = 'rest'): db_system_shape.DbSystemShape(), db_system_shape.DbSystemShape(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListDbSystemShapesResponse( db_system_shapes=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListDbSystemShapesResponse( db_system_shapes=[ db_system_shape.DbSystemShape(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListDbSystemShapesResponse( db_system_shapes=[ @@ -4715,31 +5894,35 @@ def test_list_db_system_shapes_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListDbSystemShapesResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListDbSystemShapesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_db_system_shapes(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, db_system_shape.DbSystemShape) - for i in results) + assert all(isinstance(i, db_system_shape.DbSystemShape) for i in results) pages = list(client.list_db_system_shapes(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", 
""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListAutonomousDatabasesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabasesRequest, + dict, + ], +) def test_list_autonomous_databases_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4747,14 +5930,14 @@ def test_list_autonomous_databases_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabasesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -4764,13 +5947,14 @@ def test_list_autonomous_databases_rest(request_type): return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_databases(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListAutonomousDatabasesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_autonomous_databases_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4786,12 +5970,19 @@ def test_list_autonomous_databases_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_autonomous_databases in client._transport._wrapped_methods + assert ( + client._transport.list_autonomous_databases + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_autonomous_databases] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_databases + ] = mock_rpc request = {} client.list_autonomous_databases(request) @@ -4806,57 +5997,69 @@ def test_list_autonomous_databases_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_autonomous_databases_rest_required_fields(request_type=oracledatabase.ListAutonomousDatabasesRequest): +def test_list_autonomous_databases_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabasesRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_databases._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_databases._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_databases._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_databases._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabasesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -4864,42 +6067,63 @@ def test_list_autonomous_databases_rest_required_fields(request_type=oracledatab response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_databases(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_autonomous_databases_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_autonomous_databases._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_autonomous_databases_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if 
null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_databases") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_databases") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_autonomous_databases" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_databases" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListAutonomousDatabasesRequest.pb(oracledatabase.ListAutonomousDatabasesRequest()) + pb_message = oracledatabase.ListAutonomousDatabasesRequest.pb( + oracledatabase.ListAutonomousDatabasesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4910,34 +6134,48 @@ def test_list_autonomous_databases_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListAutonomousDatabasesResponse.to_json(oracledatabase.ListAutonomousDatabasesResponse()) + req.return_value._content = ( + oracledatabase.ListAutonomousDatabasesResponse.to_json( + oracledatabase.ListAutonomousDatabasesResponse() + ) + ) request = oracledatabase.ListAutonomousDatabasesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = 
oracledatabase.ListAutonomousDatabasesResponse() - client.list_autonomous_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_autonomous_databases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_autonomous_databases_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDatabasesRequest): +def test_list_autonomous_databases_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListAutonomousDatabasesRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -4953,16 +6191,16 @@ def test_list_autonomous_databases_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDatabasesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -4972,7 +6210,7 @@ def test_list_autonomous_databases_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_autonomous_databases(**mock_args) @@ -4981,10 +6219,14 @@ def test_list_autonomous_databases_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" + % client.transport._host, + args[1], + ) -def test_list_autonomous_databases_rest_flattened_error(transport: str = 'rest'): +def test_list_autonomous_databases_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4995,20 +6237,20 @@ def test_list_autonomous_databases_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.list_autonomous_databases( oracledatabase.ListAutonomousDatabasesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_autonomous_databases_rest_pager(transport: str = 'rest'): +def test_list_autonomous_databases_rest_pager(transport: str = "rest"): client = 
OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListAutonomousDatabasesResponse( @@ -5017,17 +6259,17 @@ def test_list_autonomous_databases_rest_pager(transport: str = 'rest'): autonomous_database.AutonomousDatabase(), autonomous_database.AutonomousDatabase(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListAutonomousDatabasesResponse( autonomous_databases=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListAutonomousDatabasesResponse( autonomous_databases=[ autonomous_database.AutonomousDatabase(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListAutonomousDatabasesResponse( autonomous_databases=[ @@ -5040,31 +6282,37 @@ def test_list_autonomous_databases_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListAutonomousDatabasesResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListAutonomousDatabasesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = 
client.list_autonomous_databases(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, autonomous_database.AutonomousDatabase) - for i in results) + assert all( + isinstance(i, autonomous_database.AutonomousDatabase) for i in results + ) pages = list(client.list_autonomous_databases(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.GetAutonomousDatabaseRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetAutonomousDatabaseRequest, + dict, + ], +) def test_get_autonomous_database_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5072,20 +6320,22 @@ def test_get_autonomous_database_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = autonomous_database.AutonomousDatabase( - name='name_value', - database='database_value', - display_name='display_name_value', - entitlement_id='entitlement_id_value', - admin_password='admin_password_value', - network='network_value', - cidr='cidr_value', + name="name_value", + database="database_value", + display_name="display_name_value", + entitlement_id="entitlement_id_value", + admin_password="admin_password_value", + network="network_value", + cidr="cidr_value", ) # Wrap the value into a proper Response obj @@ -5095,19 +6345,20 @@ def test_get_autonomous_database_rest(request_type): return_value = autonomous_database.AutonomousDatabase.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_autonomous_database(request) # Establish that the response is the type that we expect. 
assert isinstance(response, autonomous_database.AutonomousDatabase) - assert response.name == 'name_value' - assert response.database == 'database_value' - assert response.display_name == 'display_name_value' - assert response.entitlement_id == 'entitlement_id_value' - assert response.admin_password == 'admin_password_value' - assert response.network == 'network_value' - assert response.cidr == 'cidr_value' + assert response.name == "name_value" + assert response.database == "database_value" + assert response.display_name == "display_name_value" + assert response.entitlement_id == "entitlement_id_value" + assert response.admin_password == "admin_password_value" + assert response.network == "network_value" + assert response.cidr == "cidr_value" + def test_get_autonomous_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5123,12 +6374,19 @@ def test_get_autonomous_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_autonomous_database in client._transport._wrapped_methods + assert ( + client._transport.get_autonomous_database + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_autonomous_database] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_autonomous_database + ] = mock_rpc request = {} client.get_autonomous_database(request) @@ -5143,55 +6401,60 @@ def test_get_autonomous_database_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_autonomous_database_rest_required_fields(request_type=oracledatabase.GetAutonomousDatabaseRequest): +def test_get_autonomous_database_rest_required_fields( + request_type=oracledatabase.GetAutonomousDatabaseRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == 
"name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = autonomous_database.AutonomousDatabase() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -5202,39 +6465,48 @@ def test_get_autonomous_database_rest_required_fields(request_type=oracledatabas return_value = autonomous_database.AutonomousDatabase.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_autonomous_database(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_autonomous_database_rest_unset_required_fields(): - transport = 
transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_autonomous_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_autonomous_database_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_get_autonomous_database") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_get_autonomous_database") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_get_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_autonomous_database" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.GetAutonomousDatabaseRequest.pb(oracledatabase.GetAutonomousDatabaseRequest()) + pb_message = oracledatabase.GetAutonomousDatabaseRequest.pb( + oracledatabase.GetAutonomousDatabaseRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5245,34 +6517,48 @@ def 
test_get_autonomous_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = autonomous_database.AutonomousDatabase.to_json(autonomous_database.AutonomousDatabase()) + req.return_value._content = autonomous_database.AutonomousDatabase.to_json( + autonomous_database.AutonomousDatabase() + ) request = oracledatabase.GetAutonomousDatabaseRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = autonomous_database.AutonomousDatabase() - client.get_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GetAutonomousDatabaseRequest): +def test_get_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.GetAutonomousDatabaseRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -5288,16 +6574,18 @@ def test_get_autonomous_database_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = autonomous_database.AutonomousDatabase() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -5307,7 +6595,7 @@ def test_get_autonomous_database_rest_flattened(): # Convert return value to protobuf type return_value = autonomous_database.AutonomousDatabase.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_autonomous_database(**mock_args) @@ -5316,10 +6604,14 @@ def test_get_autonomous_database_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" + % client.transport._host, + args[1], + ) -def test_get_autonomous_database_rest_flattened_error(transport: str = 'rest'): +def test_get_autonomous_database_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5330,21 +6622,23 @@ def test_get_autonomous_database_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_autonomous_database( oracledatabase.GetAutonomousDatabaseRequest(), - name='name_value', + name="name_value", ) def test_get_autonomous_database_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.CreateAutonomousDatabaseRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateAutonomousDatabaseRequest, + dict, + ], +) def test_create_autonomous_database_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5352,14 +6646,143 @@ def test_create_autonomous_database_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["autonomous_database"] = {'name': 'name_value', 'database': 'database_value', 'display_name': 'display_name_value', 'entitlement_id': 'entitlement_id_value', 'admin_password': 'admin_password_value', 'properties': {'ocid': 'ocid_value', 'compute_count': 0.1413, 'cpu_core_count': 1496, 'data_storage_size_tb': 2109, 'data_storage_size_gb': 2096, 
'db_workload': 1, 'db_edition': 1, 'character_set': 'character_set_value', 'n_character_set': 'n_character_set_value', 'private_endpoint_ip': 'private_endpoint_ip_value', 'private_endpoint_label': 'private_endpoint_label_value', 'db_version': 'db_version_value', 'is_auto_scaling_enabled': True, 'is_storage_auto_scaling_enabled': True, 'license_type': 1, 'customer_contacts': [{'email': 'email_value'}], 'secret_id': 'secret_id_value', 'vault_id': 'vault_id_value', 'maintenance_schedule_type': 1, 'mtls_connection_required': True, 'backup_retention_period_days': 2975, 'actual_used_data_storage_size_tb': 0.3366, 'allocated_storage_size_tb': 0.2636, 'apex_details': {'apex_version': 'apex_version_value', 'ords_version': 'ords_version_value'}, 'are_primary_allowlisted_ips_used': True, 'lifecycle_details': 'lifecycle_details_value', 'state': 1, 'autonomous_container_database_id': 'autonomous_container_database_id_value', 'available_upgrade_versions': ['available_upgrade_versions_value1', 'available_upgrade_versions_value2'], 'connection_strings': {'all_connection_strings': {'high': 'high_value', 'low': 'low_value', 'medium': 'medium_value'}, 'dedicated': 'dedicated_value', 'high': 'high_value', 'low': 'low_value', 'medium': 'medium_value', 'profiles': [{'consumer_group': 1, 'display_name': 'display_name_value', 'host_format': 1, 'is_regional': True, 'protocol': 1, 'session_mode': 1, 'syntax_format': 1, 'tls_authentication': 1, 'value': 'value_value'}]}, 'connection_urls': {'apex_uri': 'apex_uri_value', 'database_transforms_uri': 'database_transforms_uri_value', 'graph_studio_uri': 'graph_studio_uri_value', 'machine_learning_notebook_uri': 'machine_learning_notebook_uri_value', 'machine_learning_user_management_uri': 'machine_learning_user_management_uri_value', 'mongo_db_uri': 'mongo_db_uri_value', 'ords_uri': 'ords_uri_value', 'sql_dev_web_uri': 'sql_dev_web_uri_value'}, 'failed_data_recovery_duration': {'seconds': 751, 'nanos': 543}, 'memory_table_gbs': 1691, 
'is_local_data_guard_enabled': True, 'local_adg_auto_failover_max_data_loss_limit': 4513, 'local_standby_db': {'lag_time_duration': {}, 'lifecycle_details': 'lifecycle_details_value', 'state': 1, 'data_guard_role_changed_time': {'seconds': 751, 'nanos': 543}, 'disaster_recovery_role_changed_time': {}}, 'memory_per_oracle_compute_unit_gbs': 3626, 'local_disaster_recovery_type': 1, 'data_safe_state': 1, 'database_management_state': 1, 'open_mode': 1, 'operations_insights_state': 1, 'peer_db_ids': ['peer_db_ids_value1', 'peer_db_ids_value2'], 'permission_level': 1, 'private_endpoint': 'private_endpoint_value', 'refreshable_mode': 1, 'refreshable_state': 1, 'role': 1, 'scheduled_operation_details': [{'day_of_week': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'stop_time': {}}], 'sql_web_developer_url': 'sql_web_developer_url_value', 'supported_clone_regions': ['supported_clone_regions_value1', 'supported_clone_regions_value2'], 'used_data_storage_size_tbs': 2752, 'oci_url': 'oci_url_value', 'total_auto_backup_storage_size_gbs': 0.36100000000000004, 'next_long_term_backup_time': {}, 'maintenance_begin_time': {}, 'maintenance_end_time': {}}, 'labels': {}, 'network': 'network_value', 'cidr': 'cidr_value', 'create_time': {}} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["autonomous_database"] = { + "name": "name_value", + "database": "database_value", + "display_name": "display_name_value", + "entitlement_id": "entitlement_id_value", + "admin_password": "admin_password_value", + "properties": { + "ocid": "ocid_value", + "compute_count": 0.1413, + "cpu_core_count": 1496, + "data_storage_size_tb": 2109, + "data_storage_size_gb": 2096, + "db_workload": 1, + "db_edition": 1, + "character_set": "character_set_value", + "n_character_set": "n_character_set_value", + "private_endpoint_ip": "private_endpoint_ip_value", + "private_endpoint_label": "private_endpoint_label_value", + "db_version": "db_version_value", 
+ "is_auto_scaling_enabled": True, + "is_storage_auto_scaling_enabled": True, + "license_type": 1, + "customer_contacts": [{"email": "email_value"}], + "secret_id": "secret_id_value", + "vault_id": "vault_id_value", + "maintenance_schedule_type": 1, + "mtls_connection_required": True, + "backup_retention_period_days": 2975, + "actual_used_data_storage_size_tb": 0.3366, + "allocated_storage_size_tb": 0.2636, + "apex_details": { + "apex_version": "apex_version_value", + "ords_version": "ords_version_value", + }, + "are_primary_allowlisted_ips_used": True, + "lifecycle_details": "lifecycle_details_value", + "state": 1, + "autonomous_container_database_id": "autonomous_container_database_id_value", + "available_upgrade_versions": [ + "available_upgrade_versions_value1", + "available_upgrade_versions_value2", + ], + "connection_strings": { + "all_connection_strings": { + "high": "high_value", + "low": "low_value", + "medium": "medium_value", + }, + "dedicated": "dedicated_value", + "high": "high_value", + "low": "low_value", + "medium": "medium_value", + "profiles": [ + { + "consumer_group": 1, + "display_name": "display_name_value", + "host_format": 1, + "is_regional": True, + "protocol": 1, + "session_mode": 1, + "syntax_format": 1, + "tls_authentication": 1, + "value": "value_value", + } + ], + }, + "connection_urls": { + "apex_uri": "apex_uri_value", + "database_transforms_uri": "database_transforms_uri_value", + "graph_studio_uri": "graph_studio_uri_value", + "machine_learning_notebook_uri": "machine_learning_notebook_uri_value", + "machine_learning_user_management_uri": "machine_learning_user_management_uri_value", + "mongo_db_uri": "mongo_db_uri_value", + "ords_uri": "ords_uri_value", + "sql_dev_web_uri": "sql_dev_web_uri_value", + }, + "failed_data_recovery_duration": {"seconds": 751, "nanos": 543}, + "memory_table_gbs": 1691, + "is_local_data_guard_enabled": True, + "local_adg_auto_failover_max_data_loss_limit": 4513, + "local_standby_db": { + 
"lag_time_duration": {}, + "lifecycle_details": "lifecycle_details_value", + "state": 1, + "data_guard_role_changed_time": {"seconds": 751, "nanos": 543}, + "disaster_recovery_role_changed_time": {}, + }, + "memory_per_oracle_compute_unit_gbs": 3626, + "local_disaster_recovery_type": 1, + "data_safe_state": 1, + "database_management_state": 1, + "open_mode": 1, + "operations_insights_state": 1, + "peer_db_ids": ["peer_db_ids_value1", "peer_db_ids_value2"], + "permission_level": 1, + "private_endpoint": "private_endpoint_value", + "refreshable_mode": 1, + "refreshable_state": 1, + "role": 1, + "scheduled_operation_details": [ + { + "day_of_week": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "stop_time": {}, + } + ], + "sql_web_developer_url": "sql_web_developer_url_value", + "supported_clone_regions": [ + "supported_clone_regions_value1", + "supported_clone_regions_value2", + ], + "used_data_storage_size_tbs": 2752, + "oci_url": "oci_url_value", + "total_auto_backup_storage_size_gbs": 0.36100000000000004, + "next_long_term_backup_time": {}, + "maintenance_begin_time": {}, + "maintenance_end_time": {}, + }, + "labels": {}, + "network": "network_value", + "cidr": "cidr_value", + "create_time": {}, + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = oracledatabase.CreateAutonomousDatabaseRequest.meta.fields["autonomous_database"] + test_field = oracledatabase.CreateAutonomousDatabaseRequest.meta.fields[ + "autonomous_database" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -5373,7 +6796,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -5387,7 +6810,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["autonomous_database"].items(): # pragma: NO COVER + for field, value in request_init["autonomous_database"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -5402,12 +6825,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO 
COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -5420,22 +6847,23 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_autonomous_database(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_create_autonomous_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5450,12 +6878,19 @@ def test_create_autonomous_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_autonomous_database in client._transport._wrapped_methods + assert ( + client._transport.create_autonomous_database + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.create_autonomous_database] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_autonomous_database + ] = mock_rpc request = {} client.create_autonomous_database(request) @@ -5474,7 +6909,9 @@ def test_create_autonomous_database_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_autonomous_database_rest_required_fields(request_type=oracledatabase.CreateAutonomousDatabaseRequest): +def test_create_autonomous_database_rest_required_fields( + request_type=oracledatabase.CreateAutonomousDatabaseRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} @@ -5482,65 +6919,76 @@ def test_create_autonomous_database_rest_required_fields(request_type=oracledata request_init["autonomous_database_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "autonomousDatabaseId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "autonomousDatabaseId" in jsonified_request - assert jsonified_request["autonomousDatabaseId"] == request_init["autonomous_database_id"] + assert ( + jsonified_request["autonomousDatabaseId"] + == 
request_init["autonomous_database_id"] + ) - jsonified_request["parent"] = 'parent_value' - jsonified_request["autonomousDatabaseId"] = 'autonomous_database_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["autonomousDatabaseId"] = "autonomous_database_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autonomous_database._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("autonomous_database_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "autonomous_database_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "autonomousDatabaseId" in jsonified_request - assert jsonified_request["autonomousDatabaseId"] == 'autonomous_database_id_value' + assert jsonified_request["autonomousDatabaseId"] == "autonomous_database_id_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_autonomous_database(request) @@ -5550,34 +6998,60 @@ def test_create_autonomous_database_rest_required_fields(request_type=oracledata "autonomousDatabaseId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_autonomous_database_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_autonomous_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("autonomousDatabaseId", "requestId", )) & set(("parent", 
"autonomousDatabaseId", "autonomousDatabase", ))) + assert set(unset_fields) == ( + set( + ( + "autonomousDatabaseId", + "requestId", + ) + ) + & set( + ( + "parent", + "autonomousDatabaseId", + "autonomousDatabase", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_autonomous_database_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_create_autonomous_database") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_create_autonomous_database") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_create_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_create_autonomous_database" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.CreateAutonomousDatabaseRequest.pb(oracledatabase.CreateAutonomousDatabaseRequest()) + pb_message = oracledatabase.CreateAutonomousDatabaseRequest.pb( + oracledatabase.CreateAutonomousDatabaseRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5588,34 +7062,46 @@ def 
test_create_autonomous_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.CreateAutonomousDatabaseRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.CreateAutonomousDatabaseRequest): +def test_create_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.CreateAutonomousDatabaseRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -5631,18 +7117,20 @@ def test_create_autonomous_database_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - autonomous_database=gco_autonomous_database.AutonomousDatabase(name='name_value'), - autonomous_database_id='autonomous_database_id_value', + parent="parent_value", + autonomous_database=gco_autonomous_database.AutonomousDatabase( + name="name_value" + ), + autonomous_database_id="autonomous_database_id_value", ) mock_args.update(sample_request) @@ -5650,7 +7138,7 @@ def test_create_autonomous_database_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_autonomous_database(**mock_args) @@ -5659,10 +7147,14 @@ def test_create_autonomous_database_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" + % client.transport._host, + args[1], + ) -def test_create_autonomous_database_rest_flattened_error(transport: str = 'rest'): +def test_create_autonomous_database_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5673,23 +7165,27 @@ def test_create_autonomous_database_rest_flattened_error(transport: str = 'rest' with pytest.raises(ValueError): client.create_autonomous_database( oracledatabase.CreateAutonomousDatabaseRequest(), - parent='parent_value', - autonomous_database=gco_autonomous_database.AutonomousDatabase(name='name_value'), - autonomous_database_id='autonomous_database_id_value', + parent="parent_value", + autonomous_database=gco_autonomous_database.AutonomousDatabase( + name="name_value" + ), + autonomous_database_id="autonomous_database_id_value", ) def test_create_autonomous_database_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.DeleteAutonomousDatabaseRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteAutonomousDatabaseRequest, + dict, + ], +) def test_delete_autonomous_database_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5697,26 +7193,29 @@ def test_delete_autonomous_database_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": 
"projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_autonomous_database(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_delete_autonomous_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5731,12 +7230,19 @@ def test_delete_autonomous_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_autonomous_database in client._transport._wrapped_methods + assert ( + client._transport.delete_autonomous_database + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_autonomous_database] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_autonomous_database + ] = mock_rpc request = {} client.delete_autonomous_database(request) @@ -5755,57 +7261,62 @@ def test_delete_autonomous_database_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_autonomous_database_rest_required_fields(request_type=oracledatabase.DeleteAutonomousDatabaseRequest): +def test_delete_autonomous_database_rest_required_fields( + request_type=oracledatabase.DeleteAutonomousDatabaseRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autonomous_database._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id", )) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -5813,40 +7324,50 @@ def test_delete_autonomous_database_rest_required_fields(request_type=oracledata response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_autonomous_database(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_autonomous_database_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_autonomous_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_autonomous_database_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - 
mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_delete_autonomous_database") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_delete_autonomous_database") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_delete_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_delete_autonomous_database" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.DeleteAutonomousDatabaseRequest.pb(oracledatabase.DeleteAutonomousDatabaseRequest()) + pb_message = oracledatabase.DeleteAutonomousDatabaseRequest.pb( + oracledatabase.DeleteAutonomousDatabaseRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5857,34 +7378,48 @@ def test_delete_autonomous_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.DeleteAutonomousDatabaseRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() 
-def test_delete_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.DeleteAutonomousDatabaseRequest): +def test_delete_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.DeleteAutonomousDatabaseRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -5900,16 +7435,18 @@ def test_delete_autonomous_database_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -5917,7 +7454,7 @@ def test_delete_autonomous_database_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_autonomous_database(**mock_args) @@ -5926,10 +7463,14 @@ def test_delete_autonomous_database_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" + % client.transport._host, + args[1], + ) -def test_delete_autonomous_database_rest_flattened_error(transport: str = 'rest'): +def test_delete_autonomous_database_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5940,21 +7481,23 @@ def test_delete_autonomous_database_rest_flattened_error(transport: str = 'rest' with pytest.raises(ValueError): client.delete_autonomous_database( oracledatabase.DeleteAutonomousDatabaseRequest(), - name='name_value', + name="name_value", ) def test_delete_autonomous_database_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.RestoreAutonomousDatabaseRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.RestoreAutonomousDatabaseRequest, + dict, + ], +) def test_restore_autonomous_database_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5962,26 +7505,29 @@ def test_restore_autonomous_database_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.restore_autonomous_database(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_restore_autonomous_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5996,12 +7542,19 @@ def test_restore_autonomous_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.restore_autonomous_database in client._transport._wrapped_methods + assert ( + client._transport.restore_autonomous_database + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restore_autonomous_database] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.restore_autonomous_database + ] = mock_rpc request = {} client.restore_autonomous_database(request) @@ -6020,97 +7573,120 @@ def test_restore_autonomous_database_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_restore_autonomous_database_rest_required_fields(request_type=oracledatabase.RestoreAutonomousDatabaseRequest): +def test_restore_autonomous_database_rest_required_fields( + request_type=oracledatabase.RestoreAutonomousDatabaseRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' 
+ assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.restore_autonomous_database(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_restore_autonomous_database_rest_unset_required_fields(): - 
transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.restore_autonomous_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "restoreTime", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "restoreTime", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_restore_autonomous_database_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_restore_autonomous_database") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_restore_autonomous_database") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_restore_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_restore_autonomous_database" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.RestoreAutonomousDatabaseRequest.pb(oracledatabase.RestoreAutonomousDatabaseRequest()) + pb_message = 
oracledatabase.RestoreAutonomousDatabaseRequest.pb( + oracledatabase.RestoreAutonomousDatabaseRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6121,34 +7697,49 @@ def test_restore_autonomous_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.RestoreAutonomousDatabaseRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.restore_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.restore_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_restore_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.RestoreAutonomousDatabaseRequest): +def test_restore_autonomous_database_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.RestoreAutonomousDatabaseRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -6164,16 +7755,18 @@ def test_restore_autonomous_database_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", restore_time=timestamp_pb2.Timestamp(seconds=751), ) mock_args.update(sample_request) @@ -6182,7 +7775,7 @@ def test_restore_autonomous_database_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.restore_autonomous_database(**mock_args) @@ -6191,10 +7784,14 @@ def test_restore_autonomous_database_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore" + % client.transport._host, + args[1], + ) -def test_restore_autonomous_database_rest_flattened_error(transport: str = 'rest'): +def test_restore_autonomous_database_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6205,22 +7802,24 @@ def test_restore_autonomous_database_rest_flattened_error(transport: str = 'rest with pytest.raises(ValueError): client.restore_autonomous_database( oracledatabase.RestoreAutonomousDatabaseRequest(), - name='name_value', + name="name_value", restore_time=timestamp_pb2.Timestamp(seconds=751), ) def test_restore_autonomous_database_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.GenerateAutonomousDatabaseWalletRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GenerateAutonomousDatabaseWalletRequest, + dict, + ], +) def test_generate_autonomous_database_wallet_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6228,30 +7827,35 @@ def test_generate_autonomous_database_wallet_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse( - archive_content=b'archive_content_blob', + archive_content=b"archive_content_blob", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(return_value) + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.generate_autonomous_database_wallet(request) # Establish that the response is the type that we expect. assert isinstance(response, oracledatabase.GenerateAutonomousDatabaseWalletResponse) - assert response.archive_content == b'archive_content_blob' + assert response.archive_content == b"archive_content_blob" + def test_generate_autonomous_database_wallet_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6267,12 +7871,19 @@ def test_generate_autonomous_database_wallet_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.generate_autonomous_database_wallet in client._transport._wrapped_methods + assert ( + client._transport.generate_autonomous_database_wallet + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.generate_autonomous_database_wallet] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_autonomous_database_wallet + ] = mock_rpc request = {} client.generate_autonomous_database_wallet(request) @@ -6287,7 +7898,9 @@ def test_generate_autonomous_database_wallet_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_generate_autonomous_database_wallet_rest_required_fields(request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest): +def test_generate_autonomous_database_wallet_rest_required_fields( + request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} @@ -6295,95 +7908,121 @@ def test_generate_autonomous_database_wallet_rest_required_fields(request_type=o request_init["password"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["password"] = 'password_value' + jsonified_request["name"] = "name_value" + jsonified_request["password"] = "password_value" - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "password" in jsonified_request - assert jsonified_request["password"] == 'password_value' + assert jsonified_request["password"] == "password_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(return_value) + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.generate_autonomous_database_wallet(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_generate_autonomous_database_wallet_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.generate_autonomous_database_wallet._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "password", ))) + unset_fields = ( + transport.generate_autonomous_database_wallet._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "password", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_generate_autonomous_database_wallet_rest_interceptors(null_interceptor): transport = 
transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_generate_autonomous_database_wallet") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_generate_autonomous_database_wallet") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_generate_autonomous_database_wallet", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_generate_autonomous_database_wallet", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb(oracledatabase.GenerateAutonomousDatabaseWalletRequest()) + pb_message = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb( + oracledatabase.GenerateAutonomousDatabaseWalletRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6394,34 +8033,51 @@ def test_generate_autonomous_database_wallet_rest_interceptors(null_interceptor) req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.GenerateAutonomousDatabaseWalletResponse.to_json(oracledatabase.GenerateAutonomousDatabaseWalletResponse()) + req.return_value._content = ( + oracledatabase.GenerateAutonomousDatabaseWalletResponse.to_json( + oracledatabase.GenerateAutonomousDatabaseWalletResponse() 
+ ) + ) request = oracledatabase.GenerateAutonomousDatabaseWalletRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() - client.generate_autonomous_database_wallet(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.generate_autonomous_database_wallet( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_generate_autonomous_database_wallet_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest): +def test_generate_autonomous_database_wallet_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -6437,19 +8093,21 @@ def test_generate_autonomous_database_wallet_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", type_=autonomous_database.GenerateType.ALL, is_regional=True, - password='password_value', + password="password_value", ) mock_args.update(sample_request) @@ -6457,9 +8115,11 @@ def test_generate_autonomous_database_wallet_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(return_value) + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.generate_autonomous_database_wallet(**mock_args) @@ -6468,10 +8128,16 @@ def test_generate_autonomous_database_wallet_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet" + % client.transport._host, + args[1], + ) -def test_generate_autonomous_database_wallet_rest_flattened_error(transport: str = 'rest'): +def test_generate_autonomous_database_wallet_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6482,24 +8148,26 @@ def test_generate_autonomous_database_wallet_rest_flattened_error(transport: str with pytest.raises(ValueError): client.generate_autonomous_database_wallet( oracledatabase.GenerateAutonomousDatabaseWalletRequest(), - name='name_value', + name="name_value", type_=autonomous_database.GenerateType.ALL, is_regional=True, - password='password_value', + password="password_value", ) def test_generate_autonomous_database_wallet_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListAutonomousDbVersionsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDbVersionsRequest, + dict, + ], +) def test_list_autonomous_db_versions_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6507,14 +8175,14 @@ def test_list_autonomous_db_versions_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method 
and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDbVersionsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -6524,13 +8192,14 @@ def test_list_autonomous_db_versions_rest(request_type): return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_db_versions(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAutonomousDbVersionsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_autonomous_db_versions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6546,12 +8215,19 @@ def test_list_autonomous_db_versions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_autonomous_db_versions in client._transport._wrapped_methods + assert ( + client._transport.list_autonomous_db_versions + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_autonomous_db_versions] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_db_versions + ] = mock_rpc request = {} client.list_autonomous_db_versions(request) @@ -6566,57 +8242,67 @@ def test_list_autonomous_db_versions_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_autonomous_db_versions_rest_required_fields(request_type=oracledatabase.ListAutonomousDbVersionsRequest): +def test_list_autonomous_db_versions_rest_required_fields( + request_type=oracledatabase.ListAutonomousDbVersionsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDbVersionsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -6624,42 +8310,61 @@ def test_list_autonomous_db_versions_rest_required_fields(request_type=oracledat response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_db_versions(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_autonomous_db_versions_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_autonomous_db_versions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_autonomous_db_versions_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else 
transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_db_versions") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_db_versions") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_autonomous_db_versions" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_db_versions" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListAutonomousDbVersionsRequest.pb(oracledatabase.ListAutonomousDbVersionsRequest()) + pb_message = oracledatabase.ListAutonomousDbVersionsRequest.pb( + oracledatabase.ListAutonomousDbVersionsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6670,34 +8375,48 @@ def test_list_autonomous_db_versions_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListAutonomousDbVersionsResponse.to_json(oracledatabase.ListAutonomousDbVersionsResponse()) + req.return_value._content = ( + oracledatabase.ListAutonomousDbVersionsResponse.to_json( + oracledatabase.ListAutonomousDbVersionsResponse() + ) + ) request = oracledatabase.ListAutonomousDbVersionsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = 
oracledatabase.ListAutonomousDbVersionsResponse() - client.list_autonomous_db_versions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_autonomous_db_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_autonomous_db_versions_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDbVersionsRequest): +def test_list_autonomous_db_versions_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListAutonomousDbVersionsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -6713,16 +8432,16 @@ def test_list_autonomous_db_versions_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDbVersionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -6732,7 +8451,7 @@ def test_list_autonomous_db_versions_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_autonomous_db_versions(**mock_args) @@ -6741,10 +8460,14 @@ def test_list_autonomous_db_versions_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDbVersions" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDbVersions" + % client.transport._host, + args[1], + ) -def test_list_autonomous_db_versions_rest_flattened_error(transport: str = 'rest'): +def test_list_autonomous_db_versions_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6755,20 +8478,20 @@ def test_list_autonomous_db_versions_rest_flattened_error(transport: str = 'rest with pytest.raises(ValueError): client.list_autonomous_db_versions( oracledatabase.ListAutonomousDbVersionsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_autonomous_db_versions_rest_pager(transport: str = 'rest'): +def test_list_autonomous_db_versions_rest_pager(transport: str = 
"rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListAutonomousDbVersionsResponse( @@ -6777,17 +8500,17 @@ def test_list_autonomous_db_versions_rest_pager(transport: str = 'rest'): autonomous_db_version.AutonomousDbVersion(), autonomous_db_version.AutonomousDbVersion(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListAutonomousDbVersionsResponse( autonomous_db_versions=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListAutonomousDbVersionsResponse( autonomous_db_versions=[ autonomous_db_version.AutonomousDbVersion(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListAutonomousDbVersionsResponse( autonomous_db_versions=[ @@ -6800,31 +8523,37 @@ def test_list_autonomous_db_versions_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListAutonomousDbVersionsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListAutonomousDbVersionsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": 
"projects/sample1/locations/sample2"} pager = client.list_autonomous_db_versions(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, autonomous_db_version.AutonomousDbVersion) - for i in results) + assert all( + isinstance(i, autonomous_db_version.AutonomousDbVersion) for i in results + ) pages = list(client.list_autonomous_db_versions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + dict, + ], +) def test_list_autonomous_database_character_sets_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6832,30 +8561,33 @@ def test_list_autonomous_database_character_sets_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_database_character_sets(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAutonomousDatabaseCharacterSetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_autonomous_database_character_sets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6871,12 +8603,19 @@ def test_list_autonomous_database_character_sets_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_autonomous_database_character_sets in client._transport._wrapped_methods + assert ( + client._transport.list_autonomous_database_character_sets + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_autonomous_database_character_sets] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_database_character_sets + ] = mock_rpc request = {} client.list_autonomous_database_character_sets(request) @@ -6891,57 +8630,72 @@ def test_list_autonomous_database_character_sets_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_autonomous_database_character_sets_rest_required_fields(request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): +def test_list_autonomous_database_character_sets_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_character_sets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_character_sets._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_character_sets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_character_sets._get_unset_required_fields( + jsonified_request + ) # Check that path parameters and body 
parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -6949,42 +8703,68 @@ def test_list_autonomous_database_character_sets_rest_required_fields(request_ty response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(return_value) + return_value = ( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_database_character_sets(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_autonomous_database_character_sets_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.list_autonomous_database_character_sets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + unset_fields = ( + transport.list_autonomous_database_character_sets._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def 
test_list_autonomous_database_character_sets_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_database_character_sets") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_character_sets") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_autonomous_database_character_sets", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_list_autonomous_database_character_sets", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb(oracledatabase.ListAutonomousDatabaseCharacterSetsRequest()) + pb_message = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb( + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6995,34 +8775,49 @@ def test_list_autonomous_database_character_sets_rest_interceptors(null_intercep req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(oracledatabase.ListAutonomousDatabaseCharacterSetsResponse()) + req.return_value._content = 
( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + ) + ) request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() - client.list_autonomous_database_character_sets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_autonomous_database_character_sets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_autonomous_database_character_sets_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): +def test_list_autonomous_database_character_sets_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -7038,16 +8833,16 @@ def test_list_autonomous_database_character_sets_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -7055,9 +8850,11 @@ def test_list_autonomous_database_character_sets_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_autonomous_database_character_sets(**mock_args) @@ -7066,10 +8863,16 @@ def test_list_autonomous_database_character_sets_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets" + % client.transport._host, + args[1], + ) -def test_list_autonomous_database_character_sets_rest_flattened_error(transport: str = 'rest'): +def test_list_autonomous_database_character_sets_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7080,20 +8883,20 @@ def test_list_autonomous_database_character_sets_rest_flattened_error(transport: with pytest.raises(ValueError): client.list_autonomous_database_character_sets( oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_autonomous_database_character_sets_rest_pager(transport: str = 'rest'): +def test_list_autonomous_database_character_sets_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( @@ -7102,17 +8905,17 @@ def test_list_autonomous_database_character_sets_rest_pager(transport: str = 're autonomous_database_character_set.AutonomousDatabaseCharacterSet(), autonomous_database_character_set.AutonomousDatabaseCharacterSet(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( autonomous_database_character_sets=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( autonomous_database_character_sets=[ autonomous_database_character_set.AutonomousDatabaseCharacterSet(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( autonomous_database_character_sets=[ @@ -7125,31 +8928,43 @@ def test_list_autonomous_database_character_sets_rest_pager(transport: str = 're response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_autonomous_database_character_sets(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, 
autonomous_database_character_set.AutonomousDatabaseCharacterSet) - for i in results) + assert all( + isinstance( + i, autonomous_database_character_set.AutonomousDatabaseCharacterSet + ) + for i in results + ) - pages = list(client.list_autonomous_database_character_sets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.list_autonomous_database_character_sets(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListAutonomousDatabaseBackupsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabaseBackupsRequest, + dict, + ], +) def test_list_autonomous_database_backups_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7157,30 +8972,33 @@ def test_list_autonomous_database_backups_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_database_backups(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAutonomousDatabaseBackupsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_autonomous_database_backups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7196,12 +9014,19 @@ def test_list_autonomous_database_backups_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_autonomous_database_backups in client._transport._wrapped_methods + assert ( + client._transport.list_autonomous_database_backups + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_autonomous_database_backups] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_database_backups + ] = mock_rpc request = {} client.list_autonomous_database_backups(request) @@ -7216,57 +9041,68 @@ def test_list_autonomous_database_backups_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_autonomous_database_backups_rest_required_fields(request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest): +def test_list_autonomous_database_backups_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -7274,42 +9110,65 @@ def test_list_autonomous_database_backups_rest_required_fields(request_type=orac response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_database_backups(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_autonomous_database_backups_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.list_autonomous_database_backups._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + unset_fields = ( + transport.list_autonomous_database_backups._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_autonomous_database_backups_rest_interceptors(null_interceptor): transport = 
transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_database_backups") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_backups") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_autonomous_database_backups", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_backups" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb(oracledatabase.ListAutonomousDatabaseBackupsRequest()) + pb_message = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb( + oracledatabase.ListAutonomousDatabaseBackupsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7320,34 +9179,49 @@ def test_list_autonomous_database_backups_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(oracledatabase.ListAutonomousDatabaseBackupsResponse()) + req.return_value._content = ( + oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json( + oracledatabase.ListAutonomousDatabaseBackupsResponse() + ) + ) request = 
oracledatabase.ListAutonomousDatabaseBackupsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() - client.list_autonomous_database_backups(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_autonomous_database_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_autonomous_database_backups_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest): +def test_list_autonomous_database_backups_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -7363,16 +9237,16 @@ def test_list_autonomous_database_backups_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -7380,9 +9254,11 @@ def test_list_autonomous_database_backups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_autonomous_database_backups(**mock_args) @@ -7391,10 +9267,14 @@ def test_list_autonomous_database_backups_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups" + % client.transport._host, + args[1], + ) -def test_list_autonomous_database_backups_rest_flattened_error(transport: str = 'rest'): +def test_list_autonomous_database_backups_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7405,20 +9285,20 @@ def test_list_autonomous_database_backups_rest_flattened_error(transport: str = with pytest.raises(ValueError): client.list_autonomous_database_backups( oracledatabase.ListAutonomousDatabaseBackupsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_autonomous_database_backups_rest_pager(transport: str = 'rest'): +def test_list_autonomous_database_backups_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListAutonomousDatabaseBackupsResponse( @@ -7427,17 +9307,17 @@ def test_list_autonomous_database_backups_rest_pager(transport: str = 'rest'): autonomous_db_backup.AutonomousDatabaseBackup(), autonomous_db_backup.AutonomousDatabaseBackup(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListAutonomousDatabaseBackupsResponse( autonomous_database_backups=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListAutonomousDatabaseBackupsResponse( autonomous_database_backups=[ autonomous_db_backup.AutonomousDatabaseBackup(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListAutonomousDatabaseBackupsResponse( autonomous_database_backups=[ @@ -7450,24 +9330,31 @@ def test_list_autonomous_database_backups_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_autonomous_database_backups(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, autonomous_db_backup.AutonomousDatabaseBackup) - for i in results) + assert all( + isinstance(i, 
autonomous_db_backup.AutonomousDatabaseBackup) + for i in results + ) - pages = list(client.list_autonomous_database_backups(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.list_autonomous_database_backups(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -7509,8 +9396,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = OracleDatabaseClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -7533,19 +9419,26 @@ def test_transport_instance(): assert client.transport is transport -@pytest.mark.parametrize("transport_class", [ - transports.OracleDatabaseRestTransport, -]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.OracleDatabaseRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_transport_kind(transport_name): transport = OracleDatabaseClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), @@ -7558,13 +9451,15 @@ def test_oracle_database_base_transport_error(): with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.OracleDatabaseTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_oracle_database_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport.__init__') as Transport: + with mock.patch( + "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.OracleDatabaseTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -7573,34 +9468,34 @@ def test_oracle_database_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_cloud_exadata_infrastructures', - 'get_cloud_exadata_infrastructure', - 'create_cloud_exadata_infrastructure', - 'delete_cloud_exadata_infrastructure', - 'list_cloud_vm_clusters', - 'get_cloud_vm_cluster', - 'create_cloud_vm_cluster', - 'delete_cloud_vm_cluster', - 'list_entitlements', - 'list_db_servers', - 'list_db_nodes', - 'list_gi_versions', - 'list_db_system_shapes', - 'list_autonomous_databases', - 'get_autonomous_database', - 'create_autonomous_database', - 'delete_autonomous_database', - 'restore_autonomous_database', - 'generate_autonomous_database_wallet', - 'list_autonomous_db_versions', - 'list_autonomous_database_character_sets', - 'list_autonomous_database_backups', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "list_cloud_exadata_infrastructures", + "get_cloud_exadata_infrastructure", + "create_cloud_exadata_infrastructure", + "delete_cloud_exadata_infrastructure", + "list_cloud_vm_clusters", + "get_cloud_vm_cluster", + "create_cloud_vm_cluster", + "delete_cloud_vm_cluster", + "list_entitlements", + "list_db_servers", + "list_db_nodes", + "list_gi_versions", + "list_db_system_shapes", + "list_autonomous_databases", + "get_autonomous_database", + "create_autonomous_database", + "delete_autonomous_database", + "restore_autonomous_database", + "generate_autonomous_database_wallet", + "list_autonomous_db_versions", + "list_autonomous_database_character_sets", + "list_autonomous_database_backups", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -7616,7 +9511,7 @@ def test_oracle_database_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -7625,25 +9520,30 @@ def test_oracle_database_base_transport(): 
def test_oracle_database_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages') as Transport: + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.OracleDatabaseTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_oracle_database_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.OracleDatabaseTransport() @@ -7652,24 +9552,23 @@ def test_oracle_database_base_transport_with_adc(): def test_oracle_database_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) OracleDatabaseClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) def test_oracle_database_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.OracleDatabaseRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.OracleDatabaseRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @@ -7677,7 +9576,7 @@ def test_oracle_database_http_transport_client_cert_source_for_mtls(): def 
test_oracle_database_rest_lro_client(): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) transport = client.transport @@ -7691,39 +9590,54 @@ def test_oracle_database_rest_lro_client(): assert transport.operations_client is transport.operations_client -@pytest.mark.parametrize("transport_name", [ - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_oracle_database_host_no_port(transport_name): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='oracledatabase.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + api_endpoint="oracledatabase.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'oracledatabase.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://oracledatabase.googleapis.com' + "oracledatabase.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_oracle_database_host_with_port(transport_name): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='oracledatabase.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="oracledatabase.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'oracledatabase.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://oracledatabase.googleapis.com:8000' + "oracledatabase.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com:8000" ) 
-@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_oracle_database_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -7802,12 +9716,19 @@ def test_oracle_database_client_transport_session_collision(transport_name): session2 = client2.transport.list_autonomous_database_backups._session assert session1 != session2 + def test_autonomous_database_path(): project = "squid" location = "clam" autonomous_database = "whelk" - expected = "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format(project=project, location=location, autonomous_database=autonomous_database, ) - actual = OracleDatabaseClient.autonomous_database_path(project, location, autonomous_database) + expected = "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format( + project=project, + location=location, + autonomous_database=autonomous_database, + ) + actual = OracleDatabaseClient.autonomous_database_path( + project, location, autonomous_database + ) assert expected == actual @@ -7823,12 +9744,19 @@ def test_parse_autonomous_database_path(): actual = OracleDatabaseClient.parse_autonomous_database_path(path) assert expected == actual + def test_autonomous_database_backup_path(): project = "cuttlefish" location = "mussel" autonomous_database_backup = "winkle" - expected = "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format(project=project, location=location, autonomous_database_backup=autonomous_database_backup, ) - actual = OracleDatabaseClient.autonomous_database_backup_path(project, location, autonomous_database_backup) + expected = "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format( + project=project, + location=location, + 
autonomous_database_backup=autonomous_database_backup, + ) + actual = OracleDatabaseClient.autonomous_database_backup_path( + project, location, autonomous_database_backup + ) assert expected == actual @@ -7844,12 +9772,19 @@ def test_parse_autonomous_database_backup_path(): actual = OracleDatabaseClient.parse_autonomous_database_backup_path(path) assert expected == actual + def test_autonomous_database_character_set_path(): project = "squid" location = "clam" autonomous_database_character_set = "whelk" - expected = "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format(project=project, location=location, autonomous_database_character_set=autonomous_database_character_set, ) - actual = OracleDatabaseClient.autonomous_database_character_set_path(project, location, autonomous_database_character_set) + expected = "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format( + project=project, + location=location, + autonomous_database_character_set=autonomous_database_character_set, + ) + actual = OracleDatabaseClient.autonomous_database_character_set_path( + project, location, autonomous_database_character_set + ) assert expected == actual @@ -7865,12 +9800,19 @@ def test_parse_autonomous_database_character_set_path(): actual = OracleDatabaseClient.parse_autonomous_database_character_set_path(path) assert expected == actual + def test_autonomous_db_version_path(): project = "cuttlefish" location = "mussel" autonomous_db_version = "winkle" - expected = "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format(project=project, location=location, autonomous_db_version=autonomous_db_version, ) - actual = OracleDatabaseClient.autonomous_db_version_path(project, location, autonomous_db_version) + expected = "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format( + project=project, + 
location=location, + autonomous_db_version=autonomous_db_version, + ) + actual = OracleDatabaseClient.autonomous_db_version_path( + project, location, autonomous_db_version + ) assert expected == actual @@ -7886,12 +9828,19 @@ def test_parse_autonomous_db_version_path(): actual = OracleDatabaseClient.parse_autonomous_db_version_path(path) assert expected == actual + def test_cloud_exadata_infrastructure_path(): project = "squid" location = "clam" cloud_exadata_infrastructure = "whelk" - expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, ) - actual = OracleDatabaseClient.cloud_exadata_infrastructure_path(project, location, cloud_exadata_infrastructure) + expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format( + project=project, + location=location, + cloud_exadata_infrastructure=cloud_exadata_infrastructure, + ) + actual = OracleDatabaseClient.cloud_exadata_infrastructure_path( + project, location, cloud_exadata_infrastructure + ) assert expected == actual @@ -7907,12 +9856,19 @@ def test_parse_cloud_exadata_infrastructure_path(): actual = OracleDatabaseClient.parse_cloud_exadata_infrastructure_path(path) assert expected == actual + def test_cloud_vm_cluster_path(): project = "cuttlefish" location = "mussel" cloud_vm_cluster = "winkle" - expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, ) - actual = OracleDatabaseClient.cloud_vm_cluster_path(project, location, cloud_vm_cluster) + expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + ) + actual = OracleDatabaseClient.cloud_vm_cluster_path( + project, location, 
cloud_vm_cluster + ) assert expected == actual @@ -7928,13 +9884,21 @@ def test_parse_cloud_vm_cluster_path(): actual = OracleDatabaseClient.parse_cloud_vm_cluster_path(path) assert expected == actual + def test_db_node_path(): project = "squid" location = "clam" cloud_vm_cluster = "whelk" db_node = "octopus" - expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, db_node=db_node, ) - actual = OracleDatabaseClient.db_node_path(project, location, cloud_vm_cluster, db_node) + expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + db_node=db_node, + ) + actual = OracleDatabaseClient.db_node_path( + project, location, cloud_vm_cluster, db_node + ) assert expected == actual @@ -7951,13 +9915,21 @@ def test_parse_db_node_path(): actual = OracleDatabaseClient.parse_db_node_path(path) assert expected == actual + def test_db_server_path(): project = "winkle" location = "nautilus" cloud_exadata_infrastructure = "scallop" db_server = "abalone" - expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, db_server=db_server, ) - actual = OracleDatabaseClient.db_server_path(project, location, cloud_exadata_infrastructure, db_server) + expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format( + project=project, + location=location, + cloud_exadata_infrastructure=cloud_exadata_infrastructure, + db_server=db_server, + ) + actual = OracleDatabaseClient.db_server_path( + project, location, cloud_exadata_infrastructure, db_server + ) assert expected == actual @@ 
-7974,12 +9946,19 @@ def test_parse_db_server_path(): actual = OracleDatabaseClient.parse_db_server_path(path) assert expected == actual + def test_db_system_shape_path(): project = "oyster" location = "nudibranch" db_system_shape = "cuttlefish" - expected = "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format(project=project, location=location, db_system_shape=db_system_shape, ) - actual = OracleDatabaseClient.db_system_shape_path(project, location, db_system_shape) + expected = "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format( + project=project, + location=location, + db_system_shape=db_system_shape, + ) + actual = OracleDatabaseClient.db_system_shape_path( + project, location, db_system_shape + ) assert expected == actual @@ -7995,11 +9974,18 @@ def test_parse_db_system_shape_path(): actual = OracleDatabaseClient.parse_db_system_shape_path(path) assert expected == actual + def test_entitlement_path(): project = "scallop" location = "abalone" entitlement = "squid" - expected = "projects/{project}/locations/{location}/entitlements/{entitlement}".format(project=project, location=location, entitlement=entitlement, ) + expected = ( + "projects/{project}/locations/{location}/entitlements/{entitlement}".format( + project=project, + location=location, + entitlement=entitlement, + ) + ) actual = OracleDatabaseClient.entitlement_path(project, location, entitlement) assert expected == actual @@ -8016,11 +10002,16 @@ def test_parse_entitlement_path(): actual = OracleDatabaseClient.parse_entitlement_path(path) assert expected == actual + def test_gi_version_path(): project = "oyster" location = "nudibranch" gi_version = "cuttlefish" - expected = "projects/{project}/locations/{location}/giVersions/{gi_version}".format(project=project, location=location, gi_version=gi_version, ) + expected = "projects/{project}/locations/{location}/giVersions/{gi_version}".format( + project=project, + location=location, + 
gi_version=gi_version, + ) actual = OracleDatabaseClient.gi_version_path(project, location, gi_version) assert expected == actual @@ -8037,10 +10028,14 @@ def test_parse_gi_version_path(): actual = OracleDatabaseClient.parse_gi_version_path(path) assert expected == actual + def test_network_path(): project = "scallop" network = "abalone" - expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) actual = OracleDatabaseClient.network_path(project, network) assert expected == actual @@ -8056,9 +10051,12 @@ def test_parse_network_path(): actual = OracleDatabaseClient.parse_network_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = OracleDatabaseClient.common_billing_account_path(billing_account) assert expected == actual @@ -8073,9 +10071,12 @@ def test_parse_common_billing_account_path(): actual = OracleDatabaseClient.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = OracleDatabaseClient.common_folder_path(folder) assert expected == actual @@ -8090,9 +10091,12 @@ def test_parse_common_folder_path(): actual = OracleDatabaseClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = OracleDatabaseClient.common_organization_path(organization) 
assert expected == actual @@ -8107,9 +10111,12 @@ def test_parse_common_organization_path(): actual = OracleDatabaseClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = OracleDatabaseClient.common_project_path(project) assert expected == actual @@ -8124,10 +10131,14 @@ def test_parse_common_project_path(): actual = OracleDatabaseClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "scallop" location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = OracleDatabaseClient.common_location_path(project, location) assert expected == actual @@ -8147,14 +10158,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.OracleDatabaseTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.OracleDatabaseTransport, "_prep_wrapped_messages" + ) as prep: client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.OracleDatabaseTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.OracleDatabaseTransport, "_prep_wrapped_messages" + ) as prep: transport_class = OracleDatabaseClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -8163,17 +10178,23 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -def test_get_location_rest_bad_request(transport: str = 'rest', 
request_type=locations_pb2.GetLocationRequest): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8181,19 +10202,23 @@ def test_get_location_rest_bad_request(transport: str = 'rest', request_type=loc req.return_value = response_value client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.Location() @@ -8202,7 +10227,7 @@ def test_get_location_rest(request_type): response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_location(request) @@ -8210,17 +10235,22 @@ def test_get_location_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8228,19 +10258,23 @@ def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=l req.return_value = response_value client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.ListLocationsResponse() @@ -8249,7 +10283,7 @@ def test_list_locations_rest(request_type): response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_locations(request) @@ -8257,17 +10291,24 @@ def test_list_locations_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8275,28 +10316,32 @@ def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type req.return_value = response_value client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '{}' + json_return_value = "{}" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.cancel_operation(request) @@ -8304,17 +10349,24 @@ def test_cancel_operation_rest(request_type): # Establish that the response is the type that we expect. assert response is None -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8322,28 +10374,32 @@ def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type req.return_value = response_value client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '{}' + json_return_value = "{}" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_operation(request) @@ -8351,17 +10407,24 @@ def test_delete_operation_rest(request_type): # Establish that the response is the type that we expect. 
assert response is None -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8369,19 +10432,23 @@ def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=op req.return_value = response_value client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation() @@ -8390,7 +10457,7 @@ def test_get_operation_rest(request_type): response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_operation(request) @@ -8398,17 +10465,24 @@ def test_get_operation_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8416,19 +10490,23 @@ def test_list_operations_rest_bad_request(transport: str = 'rest', request_type= req.return_value = response_value client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.ListOperationsResponse() @@ -8437,7 +10515,7 @@ def test_list_operations_rest(request_type): response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_operations(request) @@ -8453,22 +10531,23 @@ def test_transport_close(): for transport, close_name in transports.items(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() + def test_client_ctx(): transports = [ - 'rest', + "rest", ] for transport in transports: client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -8477,9 +10556,13 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -8494,7 +10577,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, From a3194c8a7d745f256b235adf88375ca293dbcf21 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 23 Sep 2024 16:27:20 +0000 Subject: [PATCH 6/9] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../v1/MANIFEST.in | 2 - .../google-cloud-oracledatabase/v1/README.rst | 49 - .../v1/docs/_static/custom.css | 3 - .../v1/docs/index.rst | 7 - .../google/cloud/oracledatabase/__init__.py | 161 - .../cloud/oracledatabase_v1/__init__.py | 162 - .../cloud/oracledatabase_v1/types/__init__.py | 182 - .../google-cloud-oracledatabase/v1/noxfile.py | 278 - .../google-cloud-oracledatabase}/.coveragerc | 0 .../google-cloud-oracledatabase}/.flake8 | 4 +- .../google-cloud-oracledatabase/.gitignore | 63 + .../google-cloud-oracledatabase/CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../CONTRIBUTING.rst | 271 + 
packages/google-cloud-oracledatabase/LICENSE | 202 + .../google-cloud-oracledatabase/MANIFEST.in | 25 + .../google-cloud-oracledatabase/README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../google-cloud-oracledatabase}/docs/conf.py | 74 +- .../docs/index.rst | 28 + .../docs/multiprocessing.rst | 7 + .../oracledatabase_v1/oracle_database.rst | 0 .../docs/oracledatabase_v1/services_.rst | 0 .../docs/oracledatabase_v1/types_.rst | 0 .../docs/summary_overview.md | 22 + .../google/cloud/oracledatabase/__init__.py | 177 + .../cloud/oracledatabase/gapic_version.py | 0 .../google/cloud/oracledatabase/py.typed | 0 .../cloud/oracledatabase_v1/__init__.py | 168 + .../oracledatabase_v1/gapic_metadata.json | 0 .../cloud/oracledatabase_v1/gapic_version.py | 0 .../google/cloud/oracledatabase_v1/py.typed | 0 .../oracledatabase_v1/services/__init__.py | 0 .../services/oracle_database/__init__.py | 4 +- .../services/oracle_database/client.py | 1346 ++-- .../services/oracle_database/pagers.py | 366 +- .../oracle_database/transports/__init__.py | 12 +- .../oracle_database/transports/base.py | 430 +- .../oracle_database/transports/rest.py | 2530 +++++--- .../cloud/oracledatabase_v1/types/__init__.py | 157 + .../types/autonomous_database.py | 85 +- .../autonomous_database_character_set.py | 6 +- .../types/autonomous_db_backup.py | 15 +- .../types/autonomous_db_version.py | 5 +- .../cloud/oracledatabase_v1/types/common.py | 5 +- .../cloud/oracledatabase_v1/types/db_node.py | 12 +- .../oracledatabase_v1/types/db_server.py | 12 +- .../types/db_system_shape.py | 5 +- .../oracledatabase_v1/types/entitlement.py | 12 +- .../oracledatabase_v1/types/exadata_infra.py | 23 +- .../oracledatabase_v1/types/gi_version.py | 5 +- .../types/location_metadata.py | 5 +- .../oracledatabase_v1/types/oracledatabase.py | 135 +- .../oracledatabase_v1/types/vm_cluster.py | 21 +- 
.../google-cloud-oracledatabase}/mypy.ini | 0 .../google-cloud-oracledatabase/noxfile.py | 452 ++ ...atabase_create_autonomous_database_sync.py | 0 ...reate_cloud_exadata_infrastructure_sync.py | 0 ...e_database_create_cloud_vm_cluster_sync.py | 0 ...atabase_delete_autonomous_database_sync.py | 0 ...elete_cloud_exadata_infrastructure_sync.py | 0 ...e_database_delete_cloud_vm_cluster_sync.py | 0 ...enerate_autonomous_database_wallet_sync.py | 0 ...e_database_get_autonomous_database_sync.py | 0 ...e_get_cloud_exadata_infrastructure_sync.py | 0 ...acle_database_get_cloud_vm_cluster_sync.py | 0 ...e_list_autonomous_database_backups_sync.py | 0 ...autonomous_database_character_sets_sync.py | 0 ...database_list_autonomous_databases_sync.py | 0 ...tabase_list_autonomous_db_versions_sync.py | 0 ...list_cloud_exadata_infrastructures_sync.py | 0 ...le_database_list_cloud_vm_clusters_sync.py | 0 ...ated_oracle_database_list_db_nodes_sync.py | 0 ...ed_oracle_database_list_db_servers_sync.py | 0 ...cle_database_list_db_system_shapes_sync.py | 0 ..._oracle_database_list_entitlements_sync.py | 0 ...d_oracle_database_list_gi_versions_sync.py | 0 ...tabase_restore_autonomous_database_sync.py | 0 ...tadata_google.cloud.oracledatabase.v1.json | 0 .../scripts/decrypt-secrets.sh | 46 + .../fixup_oracledatabase_v1_keywords.py | 0 .../google-cloud-oracledatabase}/setup.py | 10 +- .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 0 .../testing/constraints-3.11.txt | 0 .../testing/constraints-3.12.txt | 0 .../testing/constraints-3.7.txt | 0 .../testing/constraints-3.8.txt | 0 .../testing/constraints-3.9.txt | 0 .../tests/__init__.py | 1 - .../tests/unit/__init__.py | 1 - .../tests/unit/gapic}/__init__.py | 1 - .../unit/gapic/oracledatabase_v1}/__init__.py | 1 - .../oracledatabase_v1/test_oracle_database.py | 5765 +++++++++++------ 97 files changed, 8910 insertions(+), 4722 deletions(-) delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in delete mode 
100644 owl-bot-staging/google-cloud-oracledatabase/v1/README.rst delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py delete mode 100644 owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/.coveragerc (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/.flake8 (92%) create mode 100644 packages/google-cloud-oracledatabase/.gitignore create mode 100644 packages/google-cloud-oracledatabase/CHANGELOG.md create mode 100644 packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-oracledatabase/CONTRIBUTING.rst create mode 100644 packages/google-cloud-oracledatabase/LICENSE create mode 100644 packages/google-cloud-oracledatabase/MANIFEST.in create mode 100644 packages/google-cloud-oracledatabase/README.rst create mode 120000 packages/google-cloud-oracledatabase/docs/CHANGELOG.md create mode 120000 packages/google-cloud-oracledatabase/docs/README.rst create mode 100644 packages/google-cloud-oracledatabase/docs/_static/custom.css create mode 100644 packages/google-cloud-oracledatabase/docs/_templates/layout.html rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/docs/conf.py (88%) create mode 100644 packages/google-cloud-oracledatabase/docs/index.rst create mode 100644 packages/google-cloud-oracledatabase/docs/multiprocessing.rst rename {owl-bot-staging/google-cloud-oracledatabase/v1 => 
packages/google-cloud-oracledatabase}/docs/oracledatabase_v1/oracle_database.rst (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/docs/oracledatabase_v1/services_.rst (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/docs/oracledatabase_v1/types_.rst (100%) create mode 100644 packages/google-cloud-oracledatabase/docs/summary_overview.md create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase/py.typed (100%) create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/gapic_metadata.json (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/py.typed (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/__init__.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py (93%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/client.py (79%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => 
packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py (76%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py (76%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py (68%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py (60%) create mode 100644 packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/autonomous_database.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py (97%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/autonomous_db_version.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/common.py (94%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/db_node.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/db_server.py (96%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => 
packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/db_system_shape.py (97%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/entitlement.py (94%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/exadata_infra.py (97%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/gi_version.py (95%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/location_metadata.py (94%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/oracledatabase.py (92%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/google/cloud/oracledatabase_v1/types/vm_cluster.py (97%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/mypy.ini (100%) create mode 100644 packages/google-cloud-oracledatabase/noxfile.py rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => 
packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => 
packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py (100%) rename 
{owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json (100%) create mode 100755 packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/scripts/fixup_oracledatabase_v1_keywords.py (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/setup.py (93%) create mode 100644 packages/google-cloud-oracledatabase/testing/.gitignore rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.10.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.11.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.12.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.7.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.8.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/testing/constraints-3.9.txt (100%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/tests/__init__.py (99%) rename {owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/tests/unit/__init__.py (99%) rename {owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1 => packages/google-cloud-oracledatabase/tests/unit/gapic}/__init__.py (99%) rename {owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic => packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1}/__init__.py (99%) rename 
{owl-bot-staging/google-cloud-oracledatabase/v1 => packages/google-cloud-oracledatabase}/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py (63%) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in b/owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in deleted file mode 100644 index fb8faa261eae..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/oracledatabase *.py -recursive-include google/cloud/oracledatabase_v1 *.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/README.rst b/owl-bot-staging/google-cloud-oracledatabase/v1/README.rst deleted file mode 100644 index 33823b82e94f..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Oracledatabase API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Oracledatabase API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. 
_`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst b/owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst deleted file mode 100644 index 8b7bea7d55e1..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - oracledatabase_v1/services - oracledatabase_v1/types diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py deleted file mode 100644 index 59a90fe071f9..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/__init__.py +++ /dev/null @@ -1,161 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.oracledatabase import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.oracledatabase_v1.services.oracle_database.client import OracleDatabaseClient - -from google.cloud.oracledatabase_v1.types.autonomous_database import AllConnectionStrings -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabase -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseApex -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseConnectionStrings -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseConnectionUrls -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseProperties -from google.cloud.oracledatabase_v1.types.autonomous_database import AutonomousDatabaseStandbySummary -from google.cloud.oracledatabase_v1.types.autonomous_database import DatabaseConnectionStringProfile -from google.cloud.oracledatabase_v1.types.autonomous_database import ScheduledOperationDetails -from google.cloud.oracledatabase_v1.types.autonomous_database import DBWorkload -from google.cloud.oracledatabase_v1.types.autonomous_database import GenerateType -from google.cloud.oracledatabase_v1.types.autonomous_database import OperationsInsightsState -from google.cloud.oracledatabase_v1.types.autonomous_database import State -from google.cloud.oracledatabase_v1.types.autonomous_database_character_set import AutonomousDatabaseCharacterSet -from google.cloud.oracledatabase_v1.types.autonomous_db_backup import AutonomousDatabaseBackup -from google.cloud.oracledatabase_v1.types.autonomous_db_backup import AutonomousDatabaseBackupProperties -from google.cloud.oracledatabase_v1.types.autonomous_db_version import AutonomousDbVersion -from 
google.cloud.oracledatabase_v1.types.common import CustomerContact -from google.cloud.oracledatabase_v1.types.db_node import DbNode -from google.cloud.oracledatabase_v1.types.db_node import DbNodeProperties -from google.cloud.oracledatabase_v1.types.db_server import DbServer -from google.cloud.oracledatabase_v1.types.db_server import DbServerProperties -from google.cloud.oracledatabase_v1.types.db_system_shape import DbSystemShape -from google.cloud.oracledatabase_v1.types.entitlement import CloudAccountDetails -from google.cloud.oracledatabase_v1.types.entitlement import Entitlement -from google.cloud.oracledatabase_v1.types.exadata_infra import CloudExadataInfrastructure -from google.cloud.oracledatabase_v1.types.exadata_infra import CloudExadataInfrastructureProperties -from google.cloud.oracledatabase_v1.types.exadata_infra import MaintenanceWindow -from google.cloud.oracledatabase_v1.types.gi_version import GiVersion -from google.cloud.oracledatabase_v1.types.location_metadata import LocationMetadata -from google.cloud.oracledatabase_v1.types.oracledatabase import CreateAutonomousDatabaseRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import CreateCloudExadataInfrastructureRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import CreateCloudVmClusterRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import DeleteAutonomousDatabaseRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import DeleteCloudExadataInfrastructureRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import DeleteCloudVmClusterRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import GenerateAutonomousDatabaseWalletRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import GenerateAutonomousDatabaseWalletResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import GetAutonomousDatabaseRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import 
GetCloudExadataInfrastructureRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import GetCloudVmClusterRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseBackupsRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseBackupsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseCharacterSetsRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabaseCharacterSetsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabasesRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDatabasesResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDbVersionsRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListAutonomousDbVersionsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudExadataInfrastructuresRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudExadataInfrastructuresResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudVmClustersRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListCloudVmClustersResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbNodesRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbNodesResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbServersRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbServersResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbSystemShapesRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListDbSystemShapesResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListEntitlementsRequest -from 
google.cloud.oracledatabase_v1.types.oracledatabase import ListEntitlementsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import ListGiVersionsRequest -from google.cloud.oracledatabase_v1.types.oracledatabase import ListGiVersionsResponse -from google.cloud.oracledatabase_v1.types.oracledatabase import OperationMetadata -from google.cloud.oracledatabase_v1.types.oracledatabase import RestoreAutonomousDatabaseRequest -from google.cloud.oracledatabase_v1.types.vm_cluster import CloudVmCluster -from google.cloud.oracledatabase_v1.types.vm_cluster import CloudVmClusterProperties -from google.cloud.oracledatabase_v1.types.vm_cluster import DataCollectionOptions - -__all__ = ('OracleDatabaseClient', - 'AllConnectionStrings', - 'AutonomousDatabase', - 'AutonomousDatabaseApex', - 'AutonomousDatabaseConnectionStrings', - 'AutonomousDatabaseConnectionUrls', - 'AutonomousDatabaseProperties', - 'AutonomousDatabaseStandbySummary', - 'DatabaseConnectionStringProfile', - 'ScheduledOperationDetails', - 'DBWorkload', - 'GenerateType', - 'OperationsInsightsState', - 'State', - 'AutonomousDatabaseCharacterSet', - 'AutonomousDatabaseBackup', - 'AutonomousDatabaseBackupProperties', - 'AutonomousDbVersion', - 'CustomerContact', - 'DbNode', - 'DbNodeProperties', - 'DbServer', - 'DbServerProperties', - 'DbSystemShape', - 'CloudAccountDetails', - 'Entitlement', - 'CloudExadataInfrastructure', - 'CloudExadataInfrastructureProperties', - 'MaintenanceWindow', - 'GiVersion', - 'LocationMetadata', - 'CreateAutonomousDatabaseRequest', - 'CreateCloudExadataInfrastructureRequest', - 'CreateCloudVmClusterRequest', - 'DeleteAutonomousDatabaseRequest', - 'DeleteCloudExadataInfrastructureRequest', - 'DeleteCloudVmClusterRequest', - 'GenerateAutonomousDatabaseWalletRequest', - 'GenerateAutonomousDatabaseWalletResponse', - 'GetAutonomousDatabaseRequest', - 'GetCloudExadataInfrastructureRequest', - 'GetCloudVmClusterRequest', - 'ListAutonomousDatabaseBackupsRequest', - 
'ListAutonomousDatabaseBackupsResponse', - 'ListAutonomousDatabaseCharacterSetsRequest', - 'ListAutonomousDatabaseCharacterSetsResponse', - 'ListAutonomousDatabasesRequest', - 'ListAutonomousDatabasesResponse', - 'ListAutonomousDbVersionsRequest', - 'ListAutonomousDbVersionsResponse', - 'ListCloudExadataInfrastructuresRequest', - 'ListCloudExadataInfrastructuresResponse', - 'ListCloudVmClustersRequest', - 'ListCloudVmClustersResponse', - 'ListDbNodesRequest', - 'ListDbNodesResponse', - 'ListDbServersRequest', - 'ListDbServersResponse', - 'ListDbSystemShapesRequest', - 'ListDbSystemShapesResponse', - 'ListEntitlementsRequest', - 'ListEntitlementsResponse', - 'ListGiVersionsRequest', - 'ListGiVersionsResponse', - 'OperationMetadata', - 'RestoreAutonomousDatabaseRequest', - 'CloudVmCluster', - 'CloudVmClusterProperties', - 'DataCollectionOptions', -) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py deleted file mode 100644 index 8b3946e07399..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/__init__.py +++ /dev/null @@ -1,162 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.oracledatabase_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.oracle_database import OracleDatabaseClient - -from .types.autonomous_database import AllConnectionStrings -from .types.autonomous_database import AutonomousDatabase -from .types.autonomous_database import AutonomousDatabaseApex -from .types.autonomous_database import AutonomousDatabaseConnectionStrings -from .types.autonomous_database import AutonomousDatabaseConnectionUrls -from .types.autonomous_database import AutonomousDatabaseProperties -from .types.autonomous_database import AutonomousDatabaseStandbySummary -from .types.autonomous_database import DatabaseConnectionStringProfile -from .types.autonomous_database import ScheduledOperationDetails -from .types.autonomous_database import DBWorkload -from .types.autonomous_database import GenerateType -from .types.autonomous_database import OperationsInsightsState -from .types.autonomous_database import State -from .types.autonomous_database_character_set import AutonomousDatabaseCharacterSet -from .types.autonomous_db_backup import AutonomousDatabaseBackup -from .types.autonomous_db_backup import AutonomousDatabaseBackupProperties -from .types.autonomous_db_version import AutonomousDbVersion -from .types.common import CustomerContact -from .types.db_node import DbNode -from .types.db_node import DbNodeProperties -from .types.db_server import DbServer -from .types.db_server import DbServerProperties -from .types.db_system_shape import DbSystemShape -from .types.entitlement import CloudAccountDetails -from .types.entitlement import Entitlement -from .types.exadata_infra import CloudExadataInfrastructure -from .types.exadata_infra import CloudExadataInfrastructureProperties -from .types.exadata_infra import MaintenanceWindow -from .types.gi_version import GiVersion -from .types.location_metadata import LocationMetadata -from .types.oracledatabase import 
CreateAutonomousDatabaseRequest -from .types.oracledatabase import CreateCloudExadataInfrastructureRequest -from .types.oracledatabase import CreateCloudVmClusterRequest -from .types.oracledatabase import DeleteAutonomousDatabaseRequest -from .types.oracledatabase import DeleteCloudExadataInfrastructureRequest -from .types.oracledatabase import DeleteCloudVmClusterRequest -from .types.oracledatabase import GenerateAutonomousDatabaseWalletRequest -from .types.oracledatabase import GenerateAutonomousDatabaseWalletResponse -from .types.oracledatabase import GetAutonomousDatabaseRequest -from .types.oracledatabase import GetCloudExadataInfrastructureRequest -from .types.oracledatabase import GetCloudVmClusterRequest -from .types.oracledatabase import ListAutonomousDatabaseBackupsRequest -from .types.oracledatabase import ListAutonomousDatabaseBackupsResponse -from .types.oracledatabase import ListAutonomousDatabaseCharacterSetsRequest -from .types.oracledatabase import ListAutonomousDatabaseCharacterSetsResponse -from .types.oracledatabase import ListAutonomousDatabasesRequest -from .types.oracledatabase import ListAutonomousDatabasesResponse -from .types.oracledatabase import ListAutonomousDbVersionsRequest -from .types.oracledatabase import ListAutonomousDbVersionsResponse -from .types.oracledatabase import ListCloudExadataInfrastructuresRequest -from .types.oracledatabase import ListCloudExadataInfrastructuresResponse -from .types.oracledatabase import ListCloudVmClustersRequest -from .types.oracledatabase import ListCloudVmClustersResponse -from .types.oracledatabase import ListDbNodesRequest -from .types.oracledatabase import ListDbNodesResponse -from .types.oracledatabase import ListDbServersRequest -from .types.oracledatabase import ListDbServersResponse -from .types.oracledatabase import ListDbSystemShapesRequest -from .types.oracledatabase import ListDbSystemShapesResponse -from .types.oracledatabase import ListEntitlementsRequest -from .types.oracledatabase 
import ListEntitlementsResponse -from .types.oracledatabase import ListGiVersionsRequest -from .types.oracledatabase import ListGiVersionsResponse -from .types.oracledatabase import OperationMetadata -from .types.oracledatabase import RestoreAutonomousDatabaseRequest -from .types.vm_cluster import CloudVmCluster -from .types.vm_cluster import CloudVmClusterProperties -from .types.vm_cluster import DataCollectionOptions - -__all__ = ( -'AllConnectionStrings', -'AutonomousDatabase', -'AutonomousDatabaseApex', -'AutonomousDatabaseBackup', -'AutonomousDatabaseBackupProperties', -'AutonomousDatabaseCharacterSet', -'AutonomousDatabaseConnectionStrings', -'AutonomousDatabaseConnectionUrls', -'AutonomousDatabaseProperties', -'AutonomousDatabaseStandbySummary', -'AutonomousDbVersion', -'CloudAccountDetails', -'CloudExadataInfrastructure', -'CloudExadataInfrastructureProperties', -'CloudVmCluster', -'CloudVmClusterProperties', -'CreateAutonomousDatabaseRequest', -'CreateCloudExadataInfrastructureRequest', -'CreateCloudVmClusterRequest', -'CustomerContact', -'DBWorkload', -'DataCollectionOptions', -'DatabaseConnectionStringProfile', -'DbNode', -'DbNodeProperties', -'DbServer', -'DbServerProperties', -'DbSystemShape', -'DeleteAutonomousDatabaseRequest', -'DeleteCloudExadataInfrastructureRequest', -'DeleteCloudVmClusterRequest', -'Entitlement', -'GenerateAutonomousDatabaseWalletRequest', -'GenerateAutonomousDatabaseWalletResponse', -'GenerateType', -'GetAutonomousDatabaseRequest', -'GetCloudExadataInfrastructureRequest', -'GetCloudVmClusterRequest', -'GiVersion', -'ListAutonomousDatabaseBackupsRequest', -'ListAutonomousDatabaseBackupsResponse', -'ListAutonomousDatabaseCharacterSetsRequest', -'ListAutonomousDatabaseCharacterSetsResponse', -'ListAutonomousDatabasesRequest', -'ListAutonomousDatabasesResponse', -'ListAutonomousDbVersionsRequest', -'ListAutonomousDbVersionsResponse', -'ListCloudExadataInfrastructuresRequest', -'ListCloudExadataInfrastructuresResponse', 
-'ListCloudVmClustersRequest', -'ListCloudVmClustersResponse', -'ListDbNodesRequest', -'ListDbNodesResponse', -'ListDbServersRequest', -'ListDbServersResponse', -'ListDbSystemShapesRequest', -'ListDbSystemShapesResponse', -'ListEntitlementsRequest', -'ListEntitlementsResponse', -'ListGiVersionsRequest', -'ListGiVersionsResponse', -'LocationMetadata', -'MaintenanceWindow', -'OperationMetadata', -'OperationsInsightsState', -'OracleDatabaseClient', -'RestoreAutonomousDatabaseRequest', -'ScheduledOperationDetails', -'State', -) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py b/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py deleted file mode 100644 index 186fe76fa7d5..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/__init__.py +++ /dev/null @@ -1,182 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .autonomous_database import ( - AllConnectionStrings, - AutonomousDatabase, - AutonomousDatabaseApex, - AutonomousDatabaseConnectionStrings, - AutonomousDatabaseConnectionUrls, - AutonomousDatabaseProperties, - AutonomousDatabaseStandbySummary, - DatabaseConnectionStringProfile, - ScheduledOperationDetails, - DBWorkload, - GenerateType, - OperationsInsightsState, - State, -) -from .autonomous_database_character_set import ( - AutonomousDatabaseCharacterSet, -) -from .autonomous_db_backup import ( - AutonomousDatabaseBackup, - AutonomousDatabaseBackupProperties, -) -from .autonomous_db_version import ( - AutonomousDbVersion, -) -from .common import ( - CustomerContact, -) -from .db_node import ( - DbNode, - DbNodeProperties, -) -from .db_server import ( - DbServer, - DbServerProperties, -) -from .db_system_shape import ( - DbSystemShape, -) -from .entitlement import ( - CloudAccountDetails, - Entitlement, -) -from .exadata_infra import ( - CloudExadataInfrastructure, - CloudExadataInfrastructureProperties, - MaintenanceWindow, -) -from .gi_version import ( - GiVersion, -) -from .location_metadata import ( - LocationMetadata, -) -from .oracledatabase import ( - CreateAutonomousDatabaseRequest, - CreateCloudExadataInfrastructureRequest, - CreateCloudVmClusterRequest, - DeleteAutonomousDatabaseRequest, - DeleteCloudExadataInfrastructureRequest, - DeleteCloudVmClusterRequest, - GenerateAutonomousDatabaseWalletRequest, - GenerateAutonomousDatabaseWalletResponse, - GetAutonomousDatabaseRequest, - GetCloudExadataInfrastructureRequest, - GetCloudVmClusterRequest, - ListAutonomousDatabaseBackupsRequest, - ListAutonomousDatabaseBackupsResponse, - ListAutonomousDatabaseCharacterSetsRequest, - ListAutonomousDatabaseCharacterSetsResponse, - ListAutonomousDatabasesRequest, - ListAutonomousDatabasesResponse, - ListAutonomousDbVersionsRequest, - ListAutonomousDbVersionsResponse, - ListCloudExadataInfrastructuresRequest, - ListCloudExadataInfrastructuresResponse, - 
ListCloudVmClustersRequest, - ListCloudVmClustersResponse, - ListDbNodesRequest, - ListDbNodesResponse, - ListDbServersRequest, - ListDbServersResponse, - ListDbSystemShapesRequest, - ListDbSystemShapesResponse, - ListEntitlementsRequest, - ListEntitlementsResponse, - ListGiVersionsRequest, - ListGiVersionsResponse, - OperationMetadata, - RestoreAutonomousDatabaseRequest, -) -from .vm_cluster import ( - CloudVmCluster, - CloudVmClusterProperties, - DataCollectionOptions, -) - -__all__ = ( - 'AllConnectionStrings', - 'AutonomousDatabase', - 'AutonomousDatabaseApex', - 'AutonomousDatabaseConnectionStrings', - 'AutonomousDatabaseConnectionUrls', - 'AutonomousDatabaseProperties', - 'AutonomousDatabaseStandbySummary', - 'DatabaseConnectionStringProfile', - 'ScheduledOperationDetails', - 'DBWorkload', - 'GenerateType', - 'OperationsInsightsState', - 'State', - 'AutonomousDatabaseCharacterSet', - 'AutonomousDatabaseBackup', - 'AutonomousDatabaseBackupProperties', - 'AutonomousDbVersion', - 'CustomerContact', - 'DbNode', - 'DbNodeProperties', - 'DbServer', - 'DbServerProperties', - 'DbSystemShape', - 'CloudAccountDetails', - 'Entitlement', - 'CloudExadataInfrastructure', - 'CloudExadataInfrastructureProperties', - 'MaintenanceWindow', - 'GiVersion', - 'LocationMetadata', - 'CreateAutonomousDatabaseRequest', - 'CreateCloudExadataInfrastructureRequest', - 'CreateCloudVmClusterRequest', - 'DeleteAutonomousDatabaseRequest', - 'DeleteCloudExadataInfrastructureRequest', - 'DeleteCloudVmClusterRequest', - 'GenerateAutonomousDatabaseWalletRequest', - 'GenerateAutonomousDatabaseWalletResponse', - 'GetAutonomousDatabaseRequest', - 'GetCloudExadataInfrastructureRequest', - 'GetCloudVmClusterRequest', - 'ListAutonomousDatabaseBackupsRequest', - 'ListAutonomousDatabaseBackupsResponse', - 'ListAutonomousDatabaseCharacterSetsRequest', - 'ListAutonomousDatabaseCharacterSetsResponse', - 'ListAutonomousDatabasesRequest', - 'ListAutonomousDatabasesResponse', - 
'ListAutonomousDbVersionsRequest', - 'ListAutonomousDbVersionsResponse', - 'ListCloudExadataInfrastructuresRequest', - 'ListCloudExadataInfrastructuresResponse', - 'ListCloudVmClustersRequest', - 'ListCloudVmClustersResponse', - 'ListDbNodesRequest', - 'ListDbNodesResponse', - 'ListDbServersRequest', - 'ListDbServersResponse', - 'ListDbSystemShapesRequest', - 'ListDbSystemShapesResponse', - 'ListEntitlementsRequest', - 'ListEntitlementsResponse', - 'ListGiVersionsRequest', - 'ListGiVersionsResponse', - 'OperationMetadata', - 'RestoreAutonomousDatabaseRequest', - 'CloudVmCluster', - 'CloudVmClusterProperties', - 'DataCollectionOptions', -) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py b/owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py deleted file mode 100644 index f7f7ff3c7bf6..000000000000 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/noxfile.py +++ /dev/null @@ -1,278 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12" -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-oracledatabase' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/oracledatabase_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/oracledatabase_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/.coveragerc b/packages/google-cloud-oracledatabase/.coveragerc similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/.coveragerc rename to packages/google-cloud-oracledatabase/.coveragerc diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 b/packages/google-cloud-oracledatabase/.flake8 similarity index 92% rename from owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 rename to packages/google-cloud-oracledatabase/.flake8 index 29227d4cf419..87f6e408c47d 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/.flake8 +++ b/packages/google-cloud-oracledatabase/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. 
**/proto/** diff --git a/packages/google-cloud-oracledatabase/.gitignore b/packages/google-cloud-oracledatabase/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-oracledatabase/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-oracledatabase/CHANGELOG.md b/packages/google-cloud-oracledatabase/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-cloud-oracledatabase/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md b/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-oracledatabase/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/CONTRIBUTING.rst b/packages/google-cloud-oracledatabase/CONTRIBUTING.rst new file mode 100644 index 000000000000..9b24d1115e7f --- /dev/null +++ b/packages/google-cloud-oracledatabase/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-oracledatabase + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-oracledatabase/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-cloud-oracledatabase/LICENSE b/packages/google-cloud-oracledatabase/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-oracledatabase/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-oracledatabase/MANIFEST.in b/packages/google-cloud-oracledatabase/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-oracledatabase/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+include README.rst LICENSE
+recursive-include google *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
diff --git a/packages/google-cloud-oracledatabase/README.rst b/packages/google-cloud-oracledatabase/README.rst
new file mode 100644
index 000000000000..3d0109a492fc
--- /dev/null
+++ b/packages/google-cloud-oracledatabase/README.rst
@@ -0,0 +1,108 @@
+Python Client for Oracle Database@Google Cloud API
+==================================================
+
+|preview| |pypi| |versions|
+
+`Oracle Database@Google Cloud API`_: provides a set of APIs to manage Oracle database resources, such as Exadata and Autonomous Databases, on Google Cloud.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg
+   :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-oracledatabase.svg
+   :target: https://pypi.org/project/google-cloud-oracledatabase/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-oracledatabase.svg
+   :target: https://pypi.org/project/google-cloud-oracledatabase/
+.. _Oracle Database@Google Cloud API: https://cloud.google.com/oracle/database/docs
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_overview
+.. _Product Documentation: https://cloud.google.com/oracle/database/docs
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Oracle Database@Google Cloud API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Oracle Database@Google Cloud API.: https://console.cloud.google.com/flows/enableapi?apiid=oracledatabase.googleapis.com
+.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-oracledatabase
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-oracledatabase
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Oracle Database@Google Cloud API
+  to see other available methods on the client.
+- Read the ` Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _ Product documentation: +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-oracledatabase/docs/CHANGELOG.md b/packages/google-cloud-oracledatabase/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/docs/README.rst b/packages/google-cloud-oracledatabase/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/docs/_static/custom.css b/packages/google-cloud-oracledatabase/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-oracledatabase/docs/_templates/layout.html b/packages/google-cloud-oracledatabase/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py b/packages/google-cloud-oracledatabase/docs/conf.py similarity index 88% rename from owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py rename to packages/google-cloud-oracledatabase/docs/conf.py index 5c45cac86fd3..d3d4e9d5a5c4 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/conf.py +++ b/packages/google-cloud-oracledatabase/docs/conf.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# # google-cloud-oracledatabase documentation build configuration file # # This file is execfile()d with the current directory set to its @@ -25,21 +24,25 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. 
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +52,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. @@ -78,9 +80,9 @@ root_doc = "index" # General information about the project. -project = u"google-cloud-oracledatabase" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +project = "google-cloud-oracledatabase" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +98,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +108,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +154,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-oracledatabase", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -258,13 +266,13 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', + #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', + #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. - # 'preamble': '', + #'preamble': '', # Latex figure (float) alignment - # 'figure_align': 'htbp', + #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. 
List of tuples @@ -274,7 +282,7 @@ ( root_doc, "google-cloud-oracledatabase.tex", - u"google-cloud-oracledatabase Documentation", + "google-cloud-oracledatabase Documentation", author, "manual", ) @@ -309,7 +317,7 @@ ( root_doc, "google-cloud-oracledatabase", - u"Google Cloud Oracledatabase Documentation", + "google-cloud-oracledatabase Documentation", [author], 1, ) @@ -328,10 +336,10 @@ ( root_doc, "google-cloud-oracledatabase", - u"google-cloud-oracledatabase Documentation", + "google-cloud-oracledatabase Documentation", author, "google-cloud-oracledatabase", - "GAPIC library for Google Cloud Oracledatabase API", + "google-cloud-oracledatabase Library", "APIs", ) ] @@ -351,14 +359,14 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/google-cloud-oracledatabase/docs/index.rst b/packages/google-cloud-oracledatabase/docs/index.rst new file mode 100644 index 000000000000..77ff04e09fa0 --- /dev/null +++ 
b/packages/google-cloud-oracledatabase/docs/index.rst @@ -0,0 +1,28 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + oracledatabase_v1/services_ + oracledatabase_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-oracledatabase`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-oracledatabase/docs/multiprocessing.rst b/packages/google-cloud-oracledatabase/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/oracle_database.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/oracle_database.rst rename to packages/google-cloud-oracledatabase/docs/oracledatabase_v1/oracle_database.rst diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/services_.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/services_.rst rename to packages/google-cloud-oracledatabase/docs/oracledatabase_v1/services_.rst diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/types_.rst b/packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/docs/oracledatabase_v1/types_.rst rename to packages/google-cloud-oracledatabase/docs/oracledatabase_v1/types_.rst diff --git a/packages/google-cloud-oracledatabase/docs/summary_overview.md b/packages/google-cloud-oracledatabase/docs/summary_overview.md new file mode 100644 index 000000000000..02a3fcd382b8 --- /dev/null +++ b/packages/google-cloud-oracledatabase/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# API + +Overview of the APIs available for API. + +## All entries + +Classes, methods and properties & attributes for + API. 
+ +[classes](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_property.html) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py new file mode 100644 index 000000000000..29f02c59e323 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/__init__.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.oracledatabase import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.oracledatabase_v1.services.oracle_database.client import ( + OracleDatabaseClient, +) +from google.cloud.oracledatabase_v1.types.autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from google.cloud.oracledatabase_v1.types.autonomous_database_character_set import ( + AutonomousDatabaseCharacterSet, +) +from google.cloud.oracledatabase_v1.types.autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from google.cloud.oracledatabase_v1.types.autonomous_db_version import ( + AutonomousDbVersion, +) +from google.cloud.oracledatabase_v1.types.common import CustomerContact +from google.cloud.oracledatabase_v1.types.db_node import DbNode, DbNodeProperties +from google.cloud.oracledatabase_v1.types.db_server import DbServer, DbServerProperties +from google.cloud.oracledatabase_v1.types.db_system_shape import DbSystemShape +from google.cloud.oracledatabase_v1.types.entitlement import ( + CloudAccountDetails, + Entitlement, +) +from google.cloud.oracledatabase_v1.types.exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from google.cloud.oracledatabase_v1.types.gi_version import GiVersion +from google.cloud.oracledatabase_v1.types.location_metadata import LocationMetadata +from google.cloud.oracledatabase_v1.types.oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + 
DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from google.cloud.oracledatabase_v1.types.vm_cluster import ( + CloudVmCluster, + CloudVmClusterProperties, + DataCollectionOptions, +) + +__all__ = ( + "OracleDatabaseClient", + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "DatabaseConnectionStringProfile", + "ScheduledOperationDetails", + "DBWorkload", + "GenerateType", + "OperationsInsightsState", + "State", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDbVersion", + "CustomerContact", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "CloudAccountDetails", + "Entitlement", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", + "GiVersion", + 
"LocationMetadata", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "OperationMetadata", + "RestoreAutonomousDatabaseRequest", + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/gapic_version.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase/gapic_version.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/py.typed b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed similarity index 100% rename from 
owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase/py.typed rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase/py.typed diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py new file mode 100644 index 000000000000..225fa0bdbb4c --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/__init__.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.oracledatabase_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.oracle_database import OracleDatabaseClient +from .types.autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from .types.autonomous_database_character_set import AutonomousDatabaseCharacterSet +from .types.autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from .types.autonomous_db_version import AutonomousDbVersion +from .types.common import CustomerContact +from .types.db_node import DbNode, DbNodeProperties +from .types.db_server import DbServer, DbServerProperties +from .types.db_system_shape import DbSystemShape +from .types.entitlement import CloudAccountDetails, Entitlement +from .types.exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from .types.gi_version import GiVersion +from .types.location_metadata import LocationMetadata +from .types.oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + 
ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from .types.vm_cluster import ( + CloudVmCluster, + CloudVmClusterProperties, + DataCollectionOptions, +) + +__all__ = ( + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "AutonomousDbVersion", + "CloudAccountDetails", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "CloudVmCluster", + "CloudVmClusterProperties", + "CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "CustomerContact", + "DBWorkload", + "DataCollectionOptions", + "DatabaseConnectionStringProfile", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "Entitlement", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GenerateType", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "GiVersion", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + 
"ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "LocationMetadata", + "MaintenanceWindow", + "OperationMetadata", + "OperationsInsightsState", + "OracleDatabaseClient", + "RestoreAutonomousDatabaseRequest", + "ScheduledOperationDetails", + "State", +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_metadata.json b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_metadata.json rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_metadata.json diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/gapic_version.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/gapic_version.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/py.typed b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed similarity index 100% rename from 
owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/py.typed rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/py.typed diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/__init__.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/__init__.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py similarity index 93% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py index 4b2d17bc5ef2..947b9516b5e7 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/__init__.py @@ -15,6 +15,4 @@ # from .client import OracleDatabaseClient -__all__ = ( - 'OracleDatabaseClient', -) +__all__ = ("OracleDatabaseClient",) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py similarity index 79% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py rename to 
packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py index f1332e871961..9a4182820e59 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/client.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/client.py @@ -16,20 +16,32 @@ from collections import OrderedDict import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings -from google.cloud.oracledatabase_v1 import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.oracledatabase_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -38,25 +50,31 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import 
locations_pb2 # type: ignore -from google.cloud.oracledatabase_v1.services.oracle_database import pagers -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database as gco_autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database_character_set -from google.cloud.oracledatabase_v1.types import autonomous_db_backup -from google.cloud.oracledatabase_v1.types import autonomous_db_version -from google.cloud.oracledatabase_v1.types import db_node -from google.cloud.oracledatabase_v1.types import db_server -from google.cloud.oracledatabase_v1.types import db_system_shape -from google.cloud.oracledatabase_v1.types import entitlement -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import gi_version -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster -from google.longrunning import operations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import OracleDatabaseTransport, DEFAULT_CLIENT_INFO + +from google.cloud.oracledatabase_v1.services.oracle_database import pagers +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) +from google.cloud.oracledatabase_v1.types import autonomous_database + +from .transports.base import DEFAULT_CLIENT_INFO, OracleDatabaseTransport from .transports.rest import 
OracleDatabaseRestTransport @@ -67,12 +85,16 @@ class OracleDatabaseClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ - _transport_registry = OrderedDict() # type: Dict[str, Type[OracleDatabaseTransport]] + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[OracleDatabaseTransport]] _transport_registry["rest"] = OracleDatabaseRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[OracleDatabaseTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[OracleDatabaseTransport]: """Returns an appropriate transport class. Args: @@ -164,8 +186,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: OracleDatabaseClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -182,194 +203,353 @@ def transport(self) -> OracleDatabaseTransport: return self._transport @staticmethod - def autonomous_database_path(project: str,location: str,autonomous_database: str,) -> str: + def autonomous_database_path( + project: str, + location: str, + autonomous_database: str, + ) -> str: """Returns a fully-qualified autonomous_database string.""" - return "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format(project=project, location=location, autonomous_database=autonomous_database, ) + return "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format( + project=project, + location=location, + autonomous_database=autonomous_database, + ) @staticmethod - def parse_autonomous_database_path(path: str) -> Dict[str,str]: + def parse_autonomous_database_path(path: str) -> Dict[str, str]: """Parses a autonomous_database path into its component segments.""" - m = 
re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabases/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabases/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def autonomous_database_backup_path(project: str,location: str,autonomous_database_backup: str,) -> str: + def autonomous_database_backup_path( + project: str, + location: str, + autonomous_database_backup: str, + ) -> str: """Returns a fully-qualified autonomous_database_backup string.""" - return "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format(project=project, location=location, autonomous_database_backup=autonomous_database_backup, ) + return "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format( + project=project, + location=location, + autonomous_database_backup=autonomous_database_backup, + ) @staticmethod - def parse_autonomous_database_backup_path(path: str) -> Dict[str,str]: + def parse_autonomous_database_backup_path(path: str) -> Dict[str, str]: """Parses a autonomous_database_backup path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseBackups/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseBackups/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def autonomous_database_character_set_path(project: str,location: str,autonomous_database_character_set: str,) -> str: + def autonomous_database_character_set_path( + project: str, + location: str, + autonomous_database_character_set: str, + ) -> str: """Returns a fully-qualified autonomous_database_character_set string.""" - return "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format(project=project, location=location, autonomous_database_character_set=autonomous_database_character_set, ) + 
return "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format( + project=project, + location=location, + autonomous_database_character_set=autonomous_database_character_set, + ) @staticmethod - def parse_autonomous_database_character_set_path(path: str) -> Dict[str,str]: + def parse_autonomous_database_character_set_path(path: str) -> Dict[str, str]: """Parses a autonomous_database_character_set path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseCharacterSets/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDatabaseCharacterSets/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def autonomous_db_version_path(project: str,location: str,autonomous_db_version: str,) -> str: + def autonomous_db_version_path( + project: str, + location: str, + autonomous_db_version: str, + ) -> str: """Returns a fully-qualified autonomous_db_version string.""" - return "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format(project=project, location=location, autonomous_db_version=autonomous_db_version, ) + return "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format( + project=project, + location=location, + autonomous_db_version=autonomous_db_version, + ) @staticmethod - def parse_autonomous_db_version_path(path: str) -> Dict[str,str]: + def parse_autonomous_db_version_path(path: str) -> Dict[str, str]: """Parses a autonomous_db_version path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDbVersions/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/autonomousDbVersions/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def cloud_exadata_infrastructure_path(project: str,location: str,cloud_exadata_infrastructure: str,) -> str: + def 
cloud_exadata_infrastructure_path( + project: str, + location: str, + cloud_exadata_infrastructure: str, + ) -> str: """Returns a fully-qualified cloud_exadata_infrastructure string.""" - return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, ) + return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format( + project=project, + location=location, + cloud_exadata_infrastructure=cloud_exadata_infrastructure, + ) @staticmethod - def parse_cloud_exadata_infrastructure_path(path: str) -> Dict[str,str]: + def parse_cloud_exadata_infrastructure_path(path: str) -> Dict[str, str]: """Parses a cloud_exadata_infrastructure path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def cloud_vm_cluster_path(project: str,location: str,cloud_vm_cluster: str,) -> str: + def cloud_vm_cluster_path( + project: str, + location: str, + cloud_vm_cluster: str, + ) -> str: """Returns a fully-qualified cloud_vm_cluster string.""" - return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, ) + return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + ) @staticmethod - def parse_cloud_vm_cluster_path(path: str) -> Dict[str,str]: + def parse_cloud_vm_cluster_path(path: str) -> Dict[str, str]: """Parses a cloud_vm_cluster path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)$", path) + m 
= re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def db_node_path(project: str,location: str,cloud_vm_cluster: str,db_node: str,) -> str: + def db_node_path( + project: str, + location: str, + cloud_vm_cluster: str, + db_node: str, + ) -> str: """Returns a fully-qualified db_node string.""" - return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, db_node=db_node, ) + return "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + db_node=db_node, + ) @staticmethod - def parse_db_node_path(path: str) -> Dict[str,str]: + def parse_db_node_path(path: str) -> Dict[str, str]: """Parses a db_node path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)/dbNodes/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudVmClusters/(?P.+?)/dbNodes/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def db_server_path(project: str,location: str,cloud_exadata_infrastructure: str,db_server: str,) -> str: + def db_server_path( + project: str, + location: str, + cloud_exadata_infrastructure: str, + db_server: str, + ) -> str: """Returns a fully-qualified db_server string.""" - return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, db_server=db_server, ) + return "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format( + project=project, + location=location, + 
cloud_exadata_infrastructure=cloud_exadata_infrastructure, + db_server=db_server, + ) @staticmethod - def parse_db_server_path(path: str) -> Dict[str,str]: + def parse_db_server_path(path: str) -> Dict[str, str]: """Parses a db_server path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)/dbServers/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/cloudExadataInfrastructures/(?P.+?)/dbServers/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def db_system_shape_path(project: str,location: str,db_system_shape: str,) -> str: + def db_system_shape_path( + project: str, + location: str, + db_system_shape: str, + ) -> str: """Returns a fully-qualified db_system_shape string.""" - return "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format(project=project, location=location, db_system_shape=db_system_shape, ) + return "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format( + project=project, + location=location, + db_system_shape=db_system_shape, + ) @staticmethod - def parse_db_system_shape_path(path: str) -> Dict[str,str]: + def parse_db_system_shape_path(path: str) -> Dict[str, str]: """Parses a db_system_shape path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dbSystemShapes/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dbSystemShapes/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def entitlement_path(project: str,location: str,entitlement: str,) -> str: + def entitlement_path( + project: str, + location: str, + entitlement: str, + ) -> str: """Returns a fully-qualified entitlement string.""" - return "projects/{project}/locations/{location}/entitlements/{entitlement}".format(project=project, location=location, entitlement=entitlement, ) + return ( + 
"projects/{project}/locations/{location}/entitlements/{entitlement}".format( + project=project, + location=location, + entitlement=entitlement, + ) + ) @staticmethod - def parse_entitlement_path(path: str) -> Dict[str,str]: + def parse_entitlement_path(path: str) -> Dict[str, str]: """Parses a entitlement path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entitlements/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/entitlements/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def gi_version_path(project: str,location: str,gi_version: str,) -> str: + def gi_version_path( + project: str, + location: str, + gi_version: str, + ) -> str: """Returns a fully-qualified gi_version string.""" - return "projects/{project}/locations/{location}/giVersions/{gi_version}".format(project=project, location=location, gi_version=gi_version, ) + return "projects/{project}/locations/{location}/giVersions/{gi_version}".format( + project=project, + location=location, + gi_version=gi_version, + ) @staticmethod - def parse_gi_version_path(path: str) -> Dict[str,str]: + def parse_gi_version_path(path: str) -> Dict[str, str]: """Parses a gi_version path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/giVersions/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/giVersions/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def network_path(project: str,network: str,) -> str: + def network_path( + project: str, + network: str, + ) -> str: """Returns a fully-qualified network string.""" - return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) @staticmethod - def parse_network_path(path: str) -> Dict[str,str]: + def parse_network_path(path: str) -> Dict[str, str]: """Parses a network 
path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = 
re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -401,16 +581,22 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -423,7 +609,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -444,13 +632,19 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert == "true", use_mtls_endpoint, universe_domain_env @staticmethod @@ -473,7 +667,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): """Return the API endpoint used by the client. 
Args: @@ -489,17 +685,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -522,8 +726,9 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ return universe_domain @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: """Returns True iff the universe domains used by the client and credentials match. 
Args: @@ -541,11 +746,13 @@ def _compare_universes(client_universe: str, credentials_universe = getattr(credentials, "universe_domain", default_universe) if client_universe != credentials_universe: - raise ValueError("The configured universe domain " + raise ValueError( + "The configured universe domain " f"({client_universe}) does not match the universe domain " f"found in the credentials ({credentials_universe}). " "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + f"`{default_universe}` is the default." + ) return True def _validate_universe_domain(self): @@ -557,8 +764,12 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - OracleDatabaseClient._compare_universes(self.universe_domain, self.transport._credentials)) + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or OracleDatabaseClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) return self._is_universe_domain_valid @property @@ -579,12 +790,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, OracleDatabaseTransport, Callable[..., OracleDatabaseTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, OracleDatabaseTransport, Callable[..., OracleDatabaseTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the oracle database client. 
Args: @@ -639,21 +854,33 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = OracleDatabaseClient._read_environment_variables() - self._client_cert_source = OracleDatabaseClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = OracleDatabaseClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = OracleDatabaseClient._read_environment_variables() + self._client_cert_source = OracleDatabaseClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = OracleDatabaseClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport @@ -662,8 +889,10 @@ def __init__(self, *, if transport_provided: # transport is a OracleDatabaseTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " @@ -672,20 +901,29 @@ def __init__(self, *, self._transport = cast(OracleDatabaseTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - OracleDatabaseClient._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or OracleDatabaseClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[OracleDatabaseTransport], Callable[..., OracleDatabaseTransport]] = ( + transport_init: Union[ + Type[OracleDatabaseTransport], Callable[..., OracleDatabaseTransport] + ] = ( OracleDatabaseClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., OracleDatabaseTransport], transport) @@ -703,14 +941,17 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) - def list_cloud_exadata_infrastructures(self, - request: 
Optional[Union[oracledatabase.ListCloudExadataInfrastructuresRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCloudExadataInfrastructuresPager: + def list_cloud_exadata_infrastructures( + self, + request: Optional[ + Union[oracledatabase.ListCloudExadataInfrastructuresRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudExadataInfrastructuresPager: r"""Lists Exadata Infrastructures in a given project and location. @@ -772,12 +1013,16 @@ def sample_list_cloud_exadata_infrastructures(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, oracledatabase.ListCloudExadataInfrastructuresRequest): + if not isinstance( + request, oracledatabase.ListCloudExadataInfrastructuresRequest + ): request = oracledatabase.ListCloudExadataInfrastructuresRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -786,14 +1031,14 @@ def sample_list_cloud_exadata_infrastructures(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_cloud_exadata_infrastructures] + rpc = self._transport._wrapped_methods[ + self._transport.list_cloud_exadata_infrastructures + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -821,14 +1066,17 @@ def sample_list_cloud_exadata_infrastructures(): # Done; return the response. return response - def get_cloud_exadata_infrastructure(self, - request: Optional[Union[oracledatabase.GetCloudExadataInfrastructureRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> exadata_infra.CloudExadataInfrastructure: + def get_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.GetCloudExadataInfrastructureRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> exadata_infra.CloudExadataInfrastructure: r"""Gets details of a single Exadata Infrastructure. .. code-block:: python @@ -886,8 +1134,10 @@ def sample_get_cloud_exadata_infrastructure(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -900,14 +1150,14 @@ def sample_get_cloud_exadata_infrastructure(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_cloud_exadata_infrastructure] + rpc = self._transport._wrapped_methods[ + self._transport.get_cloud_exadata_infrastructure + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -924,16 +1174,21 @@ def sample_get_cloud_exadata_infrastructure(): # Done; return the response. return response - def create_cloud_exadata_infrastructure(self, - request: Optional[Union[oracledatabase.CreateCloudExadataInfrastructureRequest, dict]] = None, - *, - parent: Optional[str] = None, - cloud_exadata_infrastructure: Optional[exadata_infra.CloudExadataInfrastructure] = None, - cloud_exadata_infrastructure_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.CreateCloudExadataInfrastructureRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + cloud_exadata_infrastructure: Optional[ + exadata_infra.CloudExadataInfrastructure + ] = None, + cloud_exadata_infrastructure_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> 
operation.Operation: r"""Creates a new Exadata Infrastructure in a given project and location. @@ -1014,14 +1269,20 @@ def sample_create_cloud_exadata_infrastructure(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, cloud_exadata_infrastructure, cloud_exadata_infrastructure_id]) + has_flattened_params = any( + [parent, cloud_exadata_infrastructure, cloud_exadata_infrastructure_id] + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, oracledatabase.CreateCloudExadataInfrastructureRequest): + if not isinstance( + request, oracledatabase.CreateCloudExadataInfrastructureRequest + ): request = oracledatabase.CreateCloudExadataInfrastructureRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1030,18 +1291,20 @@ def sample_create_cloud_exadata_infrastructure(): if cloud_exadata_infrastructure is not None: request.cloud_exadata_infrastructure = cloud_exadata_infrastructure if cloud_exadata_infrastructure_id is not None: - request.cloud_exadata_infrastructure_id = cloud_exadata_infrastructure_id + request.cloud_exadata_infrastructure_id = ( + cloud_exadata_infrastructure_id + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.create_cloud_exadata_infrastructure] + rpc = self._transport._wrapped_methods[ + self._transport.create_cloud_exadata_infrastructure + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1066,14 +1329,17 @@ def sample_create_cloud_exadata_infrastructure(): # Done; return the response. return response - def delete_cloud_exadata_infrastructure(self, - request: Optional[Union[oracledatabase.DeleteCloudExadataInfrastructureRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_cloud_exadata_infrastructure( + self, + request: Optional[ + Union[oracledatabase.DeleteCloudExadataInfrastructureRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single Exadata Infrastructure. .. code-block:: python @@ -1144,12 +1410,16 @@ def sample_delete_cloud_exadata_infrastructure(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, oracledatabase.DeleteCloudExadataInfrastructureRequest): + if not isinstance( + request, oracledatabase.DeleteCloudExadataInfrastructureRequest + ): request = oracledatabase.DeleteCloudExadataInfrastructureRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1158,14 +1428,14 @@ def sample_delete_cloud_exadata_infrastructure(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_cloud_exadata_infrastructure] + rpc = self._transport._wrapped_methods[ + self._transport.delete_cloud_exadata_infrastructure + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1190,14 +1460,17 @@ def sample_delete_cloud_exadata_infrastructure(): # Done; return the response. 
return response - def list_cloud_vm_clusters(self, - request: Optional[Union[oracledatabase.ListCloudVmClustersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCloudVmClustersPager: + def list_cloud_vm_clusters( + self, + request: Optional[ + Union[oracledatabase.ListCloudVmClustersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCloudVmClustersPager: r"""Lists the VM Clusters in a given project and location. @@ -1258,8 +1531,10 @@ def sample_list_cloud_vm_clusters(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1274,12 +1549,10 @@ def sample_list_cloud_vm_clusters(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_cloud_vm_clusters] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1307,14 +1580,15 @@ def sample_list_cloud_vm_clusters(): # Done; return the response. 
return response - def get_cloud_vm_cluster(self, - request: Optional[Union[oracledatabase.GetCloudVmClusterRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> vm_cluster.CloudVmCluster: + def get_cloud_vm_cluster( + self, + request: Optional[Union[oracledatabase.GetCloudVmClusterRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vm_cluster.CloudVmCluster: r"""Gets details of a single VM Cluster. .. code-block:: python @@ -1372,8 +1646,10 @@ def sample_get_cloud_vm_cluster(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1388,12 +1664,10 @@ def sample_get_cloud_vm_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_cloud_vm_cluster] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1410,16 +1684,19 @@ def sample_get_cloud_vm_cluster(): # Done; return the response. 
return response - def create_cloud_vm_cluster(self, - request: Optional[Union[oracledatabase.CreateCloudVmClusterRequest, dict]] = None, - *, - parent: Optional[str] = None, - cloud_vm_cluster: Optional[vm_cluster.CloudVmCluster] = None, - cloud_vm_cluster_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_cloud_vm_cluster( + self, + request: Optional[ + Union[oracledatabase.CreateCloudVmClusterRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + cloud_vm_cluster: Optional[vm_cluster.CloudVmCluster] = None, + cloud_vm_cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new VM Cluster in a given project and location. @@ -1506,8 +1783,10 @@ def sample_create_cloud_vm_cluster(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, cloud_vm_cluster, cloud_vm_cluster_id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1526,12 +1805,10 @@ def sample_create_cloud_vm_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_cloud_vm_cluster] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1556,14 +1833,17 @@ def sample_create_cloud_vm_cluster(): # Done; return the response. return response - def delete_cloud_vm_cluster(self, - request: Optional[Union[oracledatabase.DeleteCloudVmClusterRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_cloud_vm_cluster( + self, + request: Optional[ + Union[oracledatabase.DeleteCloudVmClusterRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single VM Cluster. .. code-block:: python @@ -1634,8 +1914,10 @@ def sample_delete_cloud_vm_cluster(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1650,12 +1932,10 @@ def sample_delete_cloud_vm_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_cloud_vm_cluster] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1680,14 +1960,15 @@ def sample_delete_cloud_vm_cluster(): # Done; return the response. return response - def list_entitlements(self, - request: Optional[Union[oracledatabase.ListEntitlementsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntitlementsPager: + def list_entitlements( + self, + request: Optional[Union[oracledatabase.ListEntitlementsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntitlementsPager: r"""Lists the entitlements in a given project. .. code-block:: python @@ -1747,8 +2028,10 @@ def sample_list_entitlements(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1763,12 +2046,10 @@ def sample_list_entitlements(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_entitlements] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1796,14 +2077,15 @@ def sample_list_entitlements(): # Done; return the response. return response - def list_db_servers(self, - request: Optional[Union[oracledatabase.ListDbServersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDbServersPager: + def list_db_servers( + self, + request: Optional[Union[oracledatabase.ListDbServersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbServersPager: r"""Lists the database servers of an Exadata Infrastructure instance. @@ -1864,8 +2146,10 @@ def sample_list_db_servers(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1880,12 +2164,10 @@ def sample_list_db_servers(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_db_servers] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1913,14 +2195,15 @@ def sample_list_db_servers(): # Done; return the response. return response - def list_db_nodes(self, - request: Optional[Union[oracledatabase.ListDbNodesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDbNodesPager: + def list_db_nodes( + self, + request: Optional[Union[oracledatabase.ListDbNodesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbNodesPager: r"""Lists the database nodes of a VM Cluster. .. code-block:: python @@ -1980,8 +2263,10 @@ def sample_list_db_nodes(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1996,12 +2281,10 @@ def sample_list_db_nodes(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_db_nodes] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2029,14 +2312,15 @@ def sample_list_db_nodes(): # Done; return the response. return response - def list_gi_versions(self, - request: Optional[Union[oracledatabase.ListGiVersionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListGiVersionsPager: + def list_gi_versions( + self, + request: Optional[Union[oracledatabase.ListGiVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListGiVersionsPager: r"""Lists all the valid Oracle Grid Infrastructure (GI) versions for the given project and location. @@ -2098,8 +2382,10 @@ def sample_list_gi_versions(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2114,12 +2400,10 @@ def sample_list_gi_versions(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_gi_versions] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2147,14 +2431,15 @@ def sample_list_gi_versions(): # Done; return the response. return response - def list_db_system_shapes(self, - request: Optional[Union[oracledatabase.ListDbSystemShapesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDbSystemShapesPager: + def list_db_system_shapes( + self, + request: Optional[Union[oracledatabase.ListDbSystemShapesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDbSystemShapesPager: r"""Lists the database system shapes available for the project and location. @@ -2216,8 +2501,10 @@ def sample_list_db_system_shapes(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2232,12 +2519,10 @@ def sample_list_db_system_shapes(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_db_system_shapes] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2265,14 +2550,17 @@ def sample_list_db_system_shapes(): # Done; return the response. return response - def list_autonomous_databases(self, - request: Optional[Union[oracledatabase.ListAutonomousDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutonomousDatabasesPager: + def list_autonomous_databases( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabasesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabasesPager: r"""Lists the Autonomous Databases in a given project and location. @@ -2334,8 +2622,10 @@ def sample_list_autonomous_databases(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2348,14 +2638,14 @@ def sample_list_autonomous_databases(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_autonomous_databases] + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_databases + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2383,14 +2673,17 @@ def sample_list_autonomous_databases(): # Done; return the response. return response - def get_autonomous_database(self, - request: Optional[Union[oracledatabase.GetAutonomousDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> autonomous_database.AutonomousDatabase: + def get_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.GetAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autonomous_database.AutonomousDatabase: r"""Gets the details of a single Autonomous Database. .. code-block:: python @@ -2448,8 +2741,10 @@ def sample_get_autonomous_database(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2464,12 +2759,10 @@ def sample_get_autonomous_database(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_autonomous_database] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2486,16 +2779,21 @@ def sample_get_autonomous_database(): # Done; return the response. return response - def create_autonomous_database(self, - request: Optional[Union[oracledatabase.CreateAutonomousDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - autonomous_database: Optional[gco_autonomous_database.AutonomousDatabase] = None, - autonomous_database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.CreateAutonomousDatabaseRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + autonomous_database: Optional[ + gco_autonomous_database.AutonomousDatabase + ] = None, + autonomous_database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new Autonomous Database in a given project and location. @@ -2580,10 +2878,14 @@ def sample_create_autonomous_database(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, autonomous_database, autonomous_database_id]) + has_flattened_params = any( + [parent, autonomous_database, autonomous_database_id] + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2600,14 +2902,14 @@ def sample_create_autonomous_database(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_autonomous_database] + rpc = self._transport._wrapped_methods[ + self._transport.create_autonomous_database + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2632,14 +2934,17 @@ def sample_create_autonomous_database(): # Done; return the response. 
return response - def delete_autonomous_database(self, - request: Optional[Union[oracledatabase.DeleteAutonomousDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.DeleteAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single Autonomous Database. .. code-block:: python @@ -2710,8 +3015,10 @@ def sample_delete_autonomous_database(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2724,14 +3031,14 @@ def sample_delete_autonomous_database(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_autonomous_database] + rpc = self._transport._wrapped_methods[ + self._transport.delete_autonomous_database + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2756,15 +3063,18 @@ def sample_delete_autonomous_database(): # Done; return the response. return response - def restore_autonomous_database(self, - request: Optional[Union[oracledatabase.RestoreAutonomousDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - restore_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def restore_autonomous_database( + self, + request: Optional[ + Union[oracledatabase.RestoreAutonomousDatabaseRequest, dict] + ] = None, + *, + name: Optional[str] = None, + restore_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Restores a single Autonomous Database. .. code-block:: python @@ -2834,8 +3144,10 @@ def sample_restore_autonomous_database(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name, restore_time]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2850,14 +3162,14 @@ def sample_restore_autonomous_database(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.restore_autonomous_database] + rpc = self._transport._wrapped_methods[ + self._transport.restore_autonomous_database + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2882,17 +3194,20 @@ def sample_restore_autonomous_database(): # Done; return the response. return response - def generate_autonomous_database_wallet(self, - request: Optional[Union[oracledatabase.GenerateAutonomousDatabaseWalletRequest, dict]] = None, - *, - name: Optional[str] = None, - type_: Optional[autonomous_database.GenerateType] = None, - is_regional: Optional[bool] = None, - password: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + def generate_autonomous_database_wallet( + self, + request: Optional[ + Union[oracledatabase.GenerateAutonomousDatabaseWalletRequest, dict] + ] = None, + *, + name: Optional[str] = None, + type_: Optional[autonomous_database.GenerateType] = None, + is_regional: Optional[bool] = None, + password: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: r"""Generates a wallet for an Autonomous Database. .. code-block:: python @@ -2974,12 +3289,16 @@ def sample_generate_autonomous_database_wallet(): # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name, type_, is_regional, password]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, oracledatabase.GenerateAutonomousDatabaseWalletRequest): + if not isinstance( + request, oracledatabase.GenerateAutonomousDatabaseWalletRequest + ): request = oracledatabase.GenerateAutonomousDatabaseWalletRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2994,14 +3313,14 @@ def sample_generate_autonomous_database_wallet(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.generate_autonomous_database_wallet] + rpc = self._transport._wrapped_methods[ + self._transport.generate_autonomous_database_wallet + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3018,14 +3337,17 @@ def sample_generate_autonomous_database_wallet(): # Done; return the response. 
return response - def list_autonomous_db_versions(self, - request: Optional[Union[oracledatabase.ListAutonomousDbVersionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutonomousDbVersionsPager: + def list_autonomous_db_versions( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDbVersionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDbVersionsPager: r"""Lists all the available Autonomous Database versions for a project and location. @@ -3087,8 +3409,10 @@ def sample_list_autonomous_db_versions(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3101,14 +3425,14 @@ def sample_list_autonomous_db_versions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_autonomous_db_versions] + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_db_versions + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3136,14 +3460,17 @@ def sample_list_autonomous_db_versions(): # Done; return the response. return response - def list_autonomous_database_character_sets(self, - request: Optional[Union[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutonomousDatabaseCharacterSetsPager: + def list_autonomous_database_character_sets( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseCharacterSetsPager: r"""Lists Autonomous Database Character Sets in a given project and location. @@ -3205,12 +3532,16 @@ def sample_list_autonomous_database_character_sets(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + if not isinstance( + request, oracledatabase.ListAutonomousDatabaseCharacterSetsRequest + ): request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3219,14 +3550,14 @@ def sample_list_autonomous_database_character_sets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_autonomous_database_character_sets] + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_database_character_sets + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3254,14 +3585,17 @@ def sample_list_autonomous_database_character_sets(): # Done; return the response. 
return response - def list_autonomous_database_backups(self, - request: Optional[Union[oracledatabase.ListAutonomousDatabaseBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutonomousDatabaseBackupsPager: + def list_autonomous_database_backups( + self, + request: Optional[ + Union[oracledatabase.ListAutonomousDatabaseBackupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutonomousDatabaseBackupsPager: r"""Lists the long-term and automatic backups of an Autonomous Database. @@ -3323,8 +3657,10 @@ def sample_list_autonomous_database_backups(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3337,14 +3673,14 @@ def sample_list_autonomous_database_backups(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_autonomous_database_backups] + rpc = self._transport._wrapped_methods[ + self._transport.list_autonomous_database_backups + ] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3425,8 +3761,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3434,7 +3769,11 @@ def list_operations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3479,8 +3818,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3488,7 +3826,11 @@ def get_operation( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3537,15 +3879,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. 
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -3590,15 +3936,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def get_location( self, @@ -3640,8 +3990,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3649,7 +3998,11 @@ def get_location( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3694,8 +4047,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3703,15 +4055,19 @@ def list_locations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) -__all__ = ( - "OracleDatabaseClient", -) +__all__ = ("OracleDatabaseClient",) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py similarity index 76% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py index 8e1634d718bd..111ec9a9d392 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/pagers.py @@ -13,29 +13,45 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database_character_set -from google.cloud.oracledatabase_v1.types import autonomous_db_backup -from google.cloud.oracledatabase_v1.types import autonomous_db_version -from google.cloud.oracledatabase_v1.types import db_node -from google.cloud.oracledatabase_v1.types import db_server -from google.cloud.oracledatabase_v1.types import db_system_shape -from google.cloud.oracledatabase_v1.types import entitlement -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import gi_version -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) class ListCloudExadataInfrastructuresPager: @@ -55,14 +71,17 @@ class 
ListCloudExadataInfrastructuresPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListCloudExadataInfrastructuresResponse], - request: oracledatabase.ListCloudExadataInfrastructuresRequest, - response: oracledatabase.ListCloudExadataInfrastructuresResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListCloudExadataInfrastructuresResponse], + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + response: oracledatabase.ListCloudExadataInfrastructuresResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -93,7 +112,12 @@ def pages(self) -> Iterator[oracledatabase.ListCloudExadataInfrastructuresRespon yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[exadata_infra.CloudExadataInfrastructure]: @@ -101,7 +125,7 @@ def __iter__(self) -> Iterator[exadata_infra.CloudExadataInfrastructure]: yield from page.cloud_exadata_infrastructures def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListCloudVmClustersPager: @@ -121,14 +145,17 @@ class ListCloudVmClustersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListCloudVmClustersResponse], - request: oracledatabase.ListCloudVmClustersRequest, - response: oracledatabase.ListCloudVmClustersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListCloudVmClustersResponse], + request: oracledatabase.ListCloudVmClustersRequest, + response: oracledatabase.ListCloudVmClustersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -159,7 +186,12 @@ def pages(self) -> Iterator[oracledatabase.ListCloudVmClustersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[vm_cluster.CloudVmCluster]: @@ -167,7 +199,7 @@ def __iter__(self) -> Iterator[vm_cluster.CloudVmCluster]: yield from page.cloud_vm_clusters def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListEntitlementsPager: @@ -187,14 +219,17 @@ class ListEntitlementsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListEntitlementsResponse], - request: oracledatabase.ListEntitlementsRequest, - response: oracledatabase.ListEntitlementsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListEntitlementsResponse], + request: oracledatabase.ListEntitlementsRequest, + response: oracledatabase.ListEntitlementsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -225,7 +260,12 @@ def pages(self) -> Iterator[oracledatabase.ListEntitlementsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[entitlement.Entitlement]: @@ -233,7 +273,7 @@ def __iter__(self) -> Iterator[entitlement.Entitlement]: yield from page.entitlements def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListDbServersPager: @@ -253,14 +293,17 @@ class ListDbServersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListDbServersResponse], - request: oracledatabase.ListDbServersRequest, - response: oracledatabase.ListDbServersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbServersResponse], + request: oracledatabase.ListDbServersRequest, + response: oracledatabase.ListDbServersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -291,7 +334,12 @@ def pages(self) -> Iterator[oracledatabase.ListDbServersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[db_server.DbServer]: @@ -299,7 +347,7 @@ def __iter__(self) -> Iterator[db_server.DbServer]: yield from page.db_servers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListDbNodesPager: @@ -319,14 +367,17 @@ class ListDbNodesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListDbNodesResponse], - request: oracledatabase.ListDbNodesRequest, - response: oracledatabase.ListDbNodesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbNodesResponse], + request: oracledatabase.ListDbNodesRequest, + response: oracledatabase.ListDbNodesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -357,7 +408,12 @@ def pages(self) -> Iterator[oracledatabase.ListDbNodesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[db_node.DbNode]: @@ -365,7 +421,7 @@ def __iter__(self) -> Iterator[db_node.DbNode]: yield from page.db_nodes def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListGiVersionsPager: @@ -385,14 +441,17 @@ class ListGiVersionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListGiVersionsResponse], - request: oracledatabase.ListGiVersionsRequest, - response: oracledatabase.ListGiVersionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListGiVersionsResponse], + request: oracledatabase.ListGiVersionsRequest, + response: oracledatabase.ListGiVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -423,7 +482,12 @@ def pages(self) -> Iterator[oracledatabase.ListGiVersionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[gi_version.GiVersion]: @@ -431,7 +495,7 @@ def __iter__(self) -> Iterator[gi_version.GiVersion]: yield from page.gi_versions def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListDbSystemShapesPager: @@ -451,14 +515,17 @@ class ListDbSystemShapesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListDbSystemShapesResponse], - request: oracledatabase.ListDbSystemShapesRequest, - response: oracledatabase.ListDbSystemShapesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListDbSystemShapesResponse], + request: oracledatabase.ListDbSystemShapesRequest, + response: oracledatabase.ListDbSystemShapesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -489,7 +556,12 @@ def pages(self) -> Iterator[oracledatabase.ListDbSystemShapesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[db_system_shape.DbSystemShape]: @@ -497,7 +569,7 @@ def __iter__(self) -> Iterator[db_system_shape.DbSystemShape]: yield from page.db_system_shapes def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAutonomousDatabasesPager: @@ -517,14 +589,17 @@ class ListAutonomousDatabasesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListAutonomousDatabasesResponse], - request: oracledatabase.ListAutonomousDatabasesRequest, - response: oracledatabase.ListAutonomousDatabasesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDatabasesResponse], + request: oracledatabase.ListAutonomousDatabasesRequest, + response: oracledatabase.ListAutonomousDatabasesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -555,7 +630,12 @@ def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabasesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[autonomous_database.AutonomousDatabase]: @@ -563,7 +643,7 @@ def __iter__(self) -> Iterator[autonomous_database.AutonomousDatabase]: yield from page.autonomous_databases def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAutonomousDbVersionsPager: @@ -583,14 +663,17 @@ class ListAutonomousDbVersionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListAutonomousDbVersionsResponse], - request: oracledatabase.ListAutonomousDbVersionsRequest, - response: oracledatabase.ListAutonomousDbVersionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDbVersionsResponse], + request: oracledatabase.ListAutonomousDbVersionsRequest, + response: oracledatabase.ListAutonomousDbVersionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -621,7 +704,12 @@ def pages(self) -> Iterator[oracledatabase.ListAutonomousDbVersionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[autonomous_db_version.AutonomousDbVersion]: @@ -629,7 +717,7 @@ def __iter__(self) -> Iterator[autonomous_db_version.AutonomousDbVersion]: yield from page.autonomous_db_versions def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAutonomousDatabaseCharacterSetsPager: @@ -649,14 +737,19 @@ class ListAutonomousDatabaseCharacterSetsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., oracledatabase.ListAutonomousDatabaseCharacterSetsResponse], - request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, - response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[ + ..., oracledatabase.ListAutonomousDatabaseCharacterSetsResponse + ], + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -673,7 +766,9 @@ def __init__(self, sent along with the request as metadata. """ self._method = method - self._request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(request) + self._request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest( + request + ) self._response = response self._retry = retry self._timeout = timeout @@ -683,19 +778,28 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: + def pages( + self, + ) -> Iterator[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __iter__(self) -> Iterator[autonomous_database_character_set.AutonomousDatabaseCharacterSet]: + def __iter__( + self, + ) -> Iterator[autonomous_database_character_set.AutonomousDatabaseCharacterSet]: for page in self.pages: yield from page.autonomous_database_character_sets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAutonomousDatabaseBackupsPager: @@ -715,14 +819,17 @@ class ListAutonomousDatabaseBackupsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., oracledatabase.ListAutonomousDatabaseBackupsResponse], - request: oracledatabase.ListAutonomousDatabaseBackupsRequest, - response: oracledatabase.ListAutonomousDatabaseBackupsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., oracledatabase.ListAutonomousDatabaseBackupsResponse], + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + response: oracledatabase.ListAutonomousDatabaseBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. Args: @@ -753,7 +860,12 @@ def pages(self) -> Iterator[oracledatabase.ListAutonomousDatabaseBackupsResponse yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[autonomous_db_backup.AutonomousDatabaseBackup]: @@ -761,4 +873,4 @@ def __iter__(self) -> Iterator[autonomous_db_backup.AutonomousDatabaseBackup]: yield from page.autonomous_database_backups def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py similarity index 76% rename from 
owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py index a6e6aeba08ab..91a06d71780e 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/__init__.py @@ -17,16 +17,14 @@ from typing import Dict, Type from .base import OracleDatabaseTransport -from .rest import OracleDatabaseRestTransport -from .rest import OracleDatabaseRestInterceptor - +from .rest import OracleDatabaseRestInterceptor, OracleDatabaseRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[OracleDatabaseTransport]] -_transport_registry['rest'] = OracleDatabaseRestTransport +_transport_registry["rest"] = OracleDatabaseRestTransport __all__ = ( - 'OracleDatabaseTransport', - 'OracleDatabaseRestTransport', - 'OracleDatabaseRestInterceptor', + "OracleDatabaseTransport", + "OracleDatabaseRestTransport", + "OracleDatabaseRestInterceptor", ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py similarity index 68% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py index 72af8abc4db0..ced22db4e8d6 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py +++ 
b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/base.py @@ -16,47 +16,49 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.oracledatabase_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster -from google.longrunning import operations_pb2 # type: ignore +from google.cloud.oracledatabase_v1 import gapic_version as package_version +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + exadata_infra, + oracledatabase, + vm_cluster, +) -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class OracleDatabaseTransport(abc.ABC): """Abstract transport class for OracleDatabase.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = 
"oracledatabase.googleapis.com" - DEFAULT_HOST: str = 'oracledatabase.googleapis.com' def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -92,30 +94,38 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -403,14 +413,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -420,201 +430,243 @@ def operations_client(self): raise NotImplementedError() @property - def list_cloud_exadata_infrastructures(self) -> Callable[ - [oracledatabase.ListCloudExadataInfrastructuresRequest], - Union[ - oracledatabase.ListCloudExadataInfrastructuresResponse, - Awaitable[oracledatabase.ListCloudExadataInfrastructuresResponse] - ]]: + def list_cloud_exadata_infrastructures( + self, + ) -> Callable[ + [oracledatabase.ListCloudExadataInfrastructuresRequest], + Union[ + oracledatabase.ListCloudExadataInfrastructuresResponse, + Awaitable[oracledatabase.ListCloudExadataInfrastructuresResponse], + ], + ]: raise NotImplementedError() @property - def get_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.GetCloudExadataInfrastructureRequest], - Union[ - exadata_infra.CloudExadataInfrastructure, - Awaitable[exadata_infra.CloudExadataInfrastructure] - ]]: + def get_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.GetCloudExadataInfrastructureRequest], + Union[ + exadata_infra.CloudExadataInfrastructure, + Awaitable[exadata_infra.CloudExadataInfrastructure], + ], + ]: raise NotImplementedError() @property - def create_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.CreateCloudExadataInfrastructureRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.CreateCloudExadataInfrastructureRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.DeleteCloudExadataInfrastructureRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudExadataInfrastructureRequest], + 
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def list_cloud_vm_clusters(self) -> Callable[ - [oracledatabase.ListCloudVmClustersRequest], - Union[ - oracledatabase.ListCloudVmClustersResponse, - Awaitable[oracledatabase.ListCloudVmClustersResponse] - ]]: + def list_cloud_vm_clusters( + self, + ) -> Callable[ + [oracledatabase.ListCloudVmClustersRequest], + Union[ + oracledatabase.ListCloudVmClustersResponse, + Awaitable[oracledatabase.ListCloudVmClustersResponse], + ], + ]: raise NotImplementedError() @property - def get_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.GetCloudVmClusterRequest], - Union[ - vm_cluster.CloudVmCluster, - Awaitable[vm_cluster.CloudVmCluster] - ]]: + def get_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.GetCloudVmClusterRequest], + Union[vm_cluster.CloudVmCluster, Awaitable[vm_cluster.CloudVmCluster]], + ]: raise NotImplementedError() @property - def create_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.CreateCloudVmClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.CreateCloudVmClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.DeleteCloudVmClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudVmClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def list_entitlements(self) -> Callable[ - [oracledatabase.ListEntitlementsRequest], - Union[ - oracledatabase.ListEntitlementsResponse, - Awaitable[oracledatabase.ListEntitlementsResponse] - ]]: + def list_entitlements( + 
self, + ) -> Callable[ + [oracledatabase.ListEntitlementsRequest], + Union[ + oracledatabase.ListEntitlementsResponse, + Awaitable[oracledatabase.ListEntitlementsResponse], + ], + ]: raise NotImplementedError() @property - def list_db_servers(self) -> Callable[ - [oracledatabase.ListDbServersRequest], - Union[ - oracledatabase.ListDbServersResponse, - Awaitable[oracledatabase.ListDbServersResponse] - ]]: + def list_db_servers( + self, + ) -> Callable[ + [oracledatabase.ListDbServersRequest], + Union[ + oracledatabase.ListDbServersResponse, + Awaitable[oracledatabase.ListDbServersResponse], + ], + ]: raise NotImplementedError() @property - def list_db_nodes(self) -> Callable[ - [oracledatabase.ListDbNodesRequest], - Union[ - oracledatabase.ListDbNodesResponse, - Awaitable[oracledatabase.ListDbNodesResponse] - ]]: + def list_db_nodes( + self, + ) -> Callable[ + [oracledatabase.ListDbNodesRequest], + Union[ + oracledatabase.ListDbNodesResponse, + Awaitable[oracledatabase.ListDbNodesResponse], + ], + ]: raise NotImplementedError() @property - def list_gi_versions(self) -> Callable[ - [oracledatabase.ListGiVersionsRequest], - Union[ - oracledatabase.ListGiVersionsResponse, - Awaitable[oracledatabase.ListGiVersionsResponse] - ]]: + def list_gi_versions( + self, + ) -> Callable[ + [oracledatabase.ListGiVersionsRequest], + Union[ + oracledatabase.ListGiVersionsResponse, + Awaitable[oracledatabase.ListGiVersionsResponse], + ], + ]: raise NotImplementedError() @property - def list_db_system_shapes(self) -> Callable[ - [oracledatabase.ListDbSystemShapesRequest], - Union[ - oracledatabase.ListDbSystemShapesResponse, - Awaitable[oracledatabase.ListDbSystemShapesResponse] - ]]: + def list_db_system_shapes( + self, + ) -> Callable[ + [oracledatabase.ListDbSystemShapesRequest], + Union[ + oracledatabase.ListDbSystemShapesResponse, + Awaitable[oracledatabase.ListDbSystemShapesResponse], + ], + ]: raise NotImplementedError() @property - def list_autonomous_databases(self) -> 
Callable[ - [oracledatabase.ListAutonomousDatabasesRequest], - Union[ - oracledatabase.ListAutonomousDatabasesResponse, - Awaitable[oracledatabase.ListAutonomousDatabasesResponse] - ]]: + def list_autonomous_databases( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabasesRequest], + Union[ + oracledatabase.ListAutonomousDatabasesResponse, + Awaitable[oracledatabase.ListAutonomousDatabasesResponse], + ], + ]: raise NotImplementedError() @property - def get_autonomous_database(self) -> Callable[ - [oracledatabase.GetAutonomousDatabaseRequest], - Union[ - autonomous_database.AutonomousDatabase, - Awaitable[autonomous_database.AutonomousDatabase] - ]]: + def get_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.GetAutonomousDatabaseRequest], + Union[ + autonomous_database.AutonomousDatabase, + Awaitable[autonomous_database.AutonomousDatabase], + ], + ]: raise NotImplementedError() @property - def create_autonomous_database(self) -> Callable[ - [oracledatabase.CreateAutonomousDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.CreateAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_autonomous_database(self) -> Callable[ - [oracledatabase.DeleteAutonomousDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.DeleteAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def restore_autonomous_database(self) -> Callable[ - [oracledatabase.RestoreAutonomousDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def restore_autonomous_database( + self, + ) -> Callable[ + 
[oracledatabase.RestoreAutonomousDatabaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def generate_autonomous_database_wallet(self) -> Callable[ - [oracledatabase.GenerateAutonomousDatabaseWalletRequest], - Union[ - oracledatabase.GenerateAutonomousDatabaseWalletResponse, - Awaitable[oracledatabase.GenerateAutonomousDatabaseWalletResponse] - ]]: + def generate_autonomous_database_wallet( + self, + ) -> Callable[ + [oracledatabase.GenerateAutonomousDatabaseWalletRequest], + Union[ + oracledatabase.GenerateAutonomousDatabaseWalletResponse, + Awaitable[oracledatabase.GenerateAutonomousDatabaseWalletResponse], + ], + ]: raise NotImplementedError() @property - def list_autonomous_db_versions(self) -> Callable[ - [oracledatabase.ListAutonomousDbVersionsRequest], - Union[ - oracledatabase.ListAutonomousDbVersionsResponse, - Awaitable[oracledatabase.ListAutonomousDbVersionsResponse] - ]]: + def list_autonomous_db_versions( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDbVersionsRequest], + Union[ + oracledatabase.ListAutonomousDbVersionsResponse, + Awaitable[oracledatabase.ListAutonomousDbVersionsResponse], + ], + ]: raise NotImplementedError() @property - def list_autonomous_database_character_sets(self) -> Callable[ - [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], - Union[ - oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, - Awaitable[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse] - ]]: + def list_autonomous_database_character_sets( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], + Union[ + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + Awaitable[oracledatabase.ListAutonomousDatabaseCharacterSetsResponse], + ], + ]: raise NotImplementedError() @property - def list_autonomous_database_backups(self) -> Callable[ - [oracledatabase.ListAutonomousDatabaseBackupsRequest], - Union[ - 
oracledatabase.ListAutonomousDatabaseBackupsResponse, - Awaitable[oracledatabase.ListAutonomousDatabaseBackupsResponse] - ]]: + def list_autonomous_database_backups( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseBackupsRequest], + Union[ + oracledatabase.ListAutonomousDatabaseBackupsResponse, + Awaitable[oracledatabase.ListAutonomousDatabaseBackupsResponse], + ], + ]: raise NotImplementedError() @property @@ -622,7 +674,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -638,23 +693,18 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: raise NotImplementedError() @property - def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -662,10 +712,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -674,6 +728,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'OracleDatabaseTransport', -) +__all__ = ("OracleDatabaseTransport",) diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py similarity index 60% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py index 3b4232729794..ad8d2e4a9c29 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/services/oracle_database/transports/rest.py @@ -14,26 +14,28 @@ # limitations under the License. # -from google.auth.transport.requests import AuthorizedSession # type: ignore +import dataclasses import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import json_format -from google.api_core import 
operations_v1 -from google.cloud.location import locations_pb2 # type: ignore +import grpc # type: ignore from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -41,14 +43,17 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster from google.longrunning import operations_pb2 # type: ignore -from .base import OracleDatabaseTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from google.cloud.oracledatabase_v1.types import ( + autonomous_database, + exadata_infra, + oracledatabase, + vm_cluster, +) +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import OracleDatabaseTransport DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -253,7 +258,14 @@ def post_restore_autonomous_database(self, response): """ - def pre_create_autonomous_database(self, request: oracledatabase.CreateAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.CreateAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + + def pre_create_autonomous_database( + self, + request: oracledatabase.CreateAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.CreateAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for create_autonomous_database Override in a subclass to manipulate the request or metadata @@ -261,7 +273,9 @@ def pre_create_autonomous_database(self, request: oracledatabase.CreateAutonomou """ return request, metadata 
- def post_create_autonomous_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_autonomous_database Override in a subclass to manipulate the response @@ -269,7 +283,15 @@ def post_create_autonomous_database(self, response: operations_pb2.Operation) -> it is returned to user code. """ return response - def pre_create_cloud_exadata_infrastructure(self, request: oracledatabase.CreateCloudExadataInfrastructureRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.CreateCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]]]: + + def pre_create_cloud_exadata_infrastructure( + self, + request: oracledatabase.CreateCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.CreateCloudExadataInfrastructureRequest, + Sequence[Tuple[str, str]], + ]: """Pre-rpc interceptor for create_cloud_exadata_infrastructure Override in a subclass to manipulate the request or metadata @@ -277,7 +299,9 @@ def pre_create_cloud_exadata_infrastructure(self, request: oracledatabase.Create """ return request, metadata - def post_create_cloud_exadata_infrastructure(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_cloud_exadata_infrastructure( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cloud_exadata_infrastructure Override in a subclass to manipulate the response @@ -285,7 +309,12 @@ def post_create_cloud_exadata_infrastructure(self, response: operations_pb2.Oper it is returned to user code. 
""" return response - def pre_create_cloud_vm_cluster(self, request: oracledatabase.CreateCloudVmClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.CreateCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + + def pre_create_cloud_vm_cluster( + self, + request: oracledatabase.CreateCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.CreateCloudVmClusterRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_cloud_vm_cluster Override in a subclass to manipulate the request or metadata @@ -293,7 +322,9 @@ def pre_create_cloud_vm_cluster(self, request: oracledatabase.CreateCloudVmClust """ return request, metadata - def post_create_cloud_vm_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_cloud_vm_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cloud_vm_cluster Override in a subclass to manipulate the response @@ -301,7 +332,14 @@ def post_create_cloud_vm_cluster(self, response: operations_pb2.Operation) -> op it is returned to user code. 
""" return response - def pre_delete_autonomous_database(self, request: oracledatabase.DeleteAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.DeleteAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + + def pre_delete_autonomous_database( + self, + request: oracledatabase.DeleteAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.DeleteAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for delete_autonomous_database Override in a subclass to manipulate the request or metadata @@ -309,7 +347,9 @@ def pre_delete_autonomous_database(self, request: oracledatabase.DeleteAutonomou """ return request, metadata - def post_delete_autonomous_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_autonomous_database Override in a subclass to manipulate the response @@ -317,7 +357,15 @@ def post_delete_autonomous_database(self, response: operations_pb2.Operation) -> it is returned to user code. 
""" return response - def pre_delete_cloud_exadata_infrastructure(self, request: oracledatabase.DeleteCloudExadataInfrastructureRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.DeleteCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]]]: + + def pre_delete_cloud_exadata_infrastructure( + self, + request: oracledatabase.DeleteCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.DeleteCloudExadataInfrastructureRequest, + Sequence[Tuple[str, str]], + ]: """Pre-rpc interceptor for delete_cloud_exadata_infrastructure Override in a subclass to manipulate the request or metadata @@ -325,7 +373,9 @@ def pre_delete_cloud_exadata_infrastructure(self, request: oracledatabase.Delete """ return request, metadata - def post_delete_cloud_exadata_infrastructure(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_cloud_exadata_infrastructure( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cloud_exadata_infrastructure Override in a subclass to manipulate the response @@ -333,7 +383,12 @@ def post_delete_cloud_exadata_infrastructure(self, response: operations_pb2.Oper it is returned to user code. 
""" return response - def pre_delete_cloud_vm_cluster(self, request: oracledatabase.DeleteCloudVmClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.DeleteCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + + def pre_delete_cloud_vm_cluster( + self, + request: oracledatabase.DeleteCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.DeleteCloudVmClusterRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_cloud_vm_cluster Override in a subclass to manipulate the request or metadata @@ -341,7 +396,9 @@ def pre_delete_cloud_vm_cluster(self, request: oracledatabase.DeleteCloudVmClust """ return request, metadata - def post_delete_cloud_vm_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_cloud_vm_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cloud_vm_cluster Override in a subclass to manipulate the response @@ -349,7 +406,15 @@ def post_delete_cloud_vm_cluster(self, response: operations_pb2.Operation) -> op it is returned to user code. 
""" return response - def pre_generate_autonomous_database_wallet(self, request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GenerateAutonomousDatabaseWalletRequest, Sequence[Tuple[str, str]]]: + + def pre_generate_autonomous_database_wallet( + self, + request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.GenerateAutonomousDatabaseWalletRequest, + Sequence[Tuple[str, str]], + ]: """Pre-rpc interceptor for generate_autonomous_database_wallet Override in a subclass to manipulate the request or metadata @@ -357,7 +422,9 @@ def pre_generate_autonomous_database_wallet(self, request: oracledatabase.Genera """ return request, metadata - def post_generate_autonomous_database_wallet(self, response: oracledatabase.GenerateAutonomousDatabaseWalletResponse) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + def post_generate_autonomous_database_wallet( + self, response: oracledatabase.GenerateAutonomousDatabaseWalletResponse + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: """Post-rpc interceptor for generate_autonomous_database_wallet Override in a subclass to manipulate the response @@ -365,7 +432,12 @@ def post_generate_autonomous_database_wallet(self, response: oracledatabase.Gene it is returned to user code. 
""" return response - def pre_get_autonomous_database(self, request: oracledatabase.GetAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GetAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + + def pre_get_autonomous_database( + self, + request: oracledatabase.GetAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.GetAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_autonomous_database Override in a subclass to manipulate the request or metadata @@ -373,7 +445,9 @@ def pre_get_autonomous_database(self, request: oracledatabase.GetAutonomousDatab """ return request, metadata - def post_get_autonomous_database(self, response: autonomous_database.AutonomousDatabase) -> autonomous_database.AutonomousDatabase: + def post_get_autonomous_database( + self, response: autonomous_database.AutonomousDatabase + ) -> autonomous_database.AutonomousDatabase: """Post-rpc interceptor for get_autonomous_database Override in a subclass to manipulate the response @@ -381,7 +455,14 @@ def post_get_autonomous_database(self, response: autonomous_database.AutonomousD it is returned to user code. 
""" return response - def pre_get_cloud_exadata_infrastructure(self, request: oracledatabase.GetCloudExadataInfrastructureRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GetCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]]]: + + def pre_get_cloud_exadata_infrastructure( + self, + request: oracledatabase.GetCloudExadataInfrastructureRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.GetCloudExadataInfrastructureRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for get_cloud_exadata_infrastructure Override in a subclass to manipulate the request or metadata @@ -389,7 +470,9 @@ def pre_get_cloud_exadata_infrastructure(self, request: oracledatabase.GetCloudE """ return request, metadata - def post_get_cloud_exadata_infrastructure(self, response: exadata_infra.CloudExadataInfrastructure) -> exadata_infra.CloudExadataInfrastructure: + def post_get_cloud_exadata_infrastructure( + self, response: exadata_infra.CloudExadataInfrastructure + ) -> exadata_infra.CloudExadataInfrastructure: """Post-rpc interceptor for get_cloud_exadata_infrastructure Override in a subclass to manipulate the response @@ -397,7 +480,12 @@ def post_get_cloud_exadata_infrastructure(self, response: exadata_infra.CloudExa it is returned to user code. 
""" return response - def pre_get_cloud_vm_cluster(self, request: oracledatabase.GetCloudVmClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.GetCloudVmClusterRequest, Sequence[Tuple[str, str]]]: + + def pre_get_cloud_vm_cluster( + self, + request: oracledatabase.GetCloudVmClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.GetCloudVmClusterRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_cloud_vm_cluster Override in a subclass to manipulate the request or metadata @@ -405,7 +493,9 @@ def pre_get_cloud_vm_cluster(self, request: oracledatabase.GetCloudVmClusterRequ """ return request, metadata - def post_get_cloud_vm_cluster(self, response: vm_cluster.CloudVmCluster) -> vm_cluster.CloudVmCluster: + def post_get_cloud_vm_cluster( + self, response: vm_cluster.CloudVmCluster + ) -> vm_cluster.CloudVmCluster: """Post-rpc interceptor for get_cloud_vm_cluster Override in a subclass to manipulate the response @@ -413,7 +503,14 @@ def post_get_cloud_vm_cluster(self, response: vm_cluster.CloudVmCluster) -> vm_c it is returned to user code. 
""" return response - def pre_list_autonomous_database_backups(self, request: oracledatabase.ListAutonomousDatabaseBackupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDatabaseBackupsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_autonomous_database_backups( + self, + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabaseBackupsRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for list_autonomous_database_backups Override in a subclass to manipulate the request or metadata @@ -421,7 +518,9 @@ def pre_list_autonomous_database_backups(self, request: oracledatabase.ListAuton """ return request, metadata - def post_list_autonomous_database_backups(self, response: oracledatabase.ListAutonomousDatabaseBackupsResponse) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + def post_list_autonomous_database_backups( + self, response: oracledatabase.ListAutonomousDatabaseBackupsResponse + ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: """Post-rpc interceptor for list_autonomous_database_backups Override in a subclass to manipulate the response @@ -429,7 +528,15 @@ def post_list_autonomous_database_backups(self, response: oracledatabase.ListAut it is returned to user code. 
""" return response - def pre_list_autonomous_database_character_sets(self, request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_autonomous_database_character_sets( + self, + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + Sequence[Tuple[str, str]], + ]: """Pre-rpc interceptor for list_autonomous_database_character_sets Override in a subclass to manipulate the request or metadata @@ -437,7 +544,9 @@ def pre_list_autonomous_database_character_sets(self, request: oracledatabase.Li """ return request, metadata - def post_list_autonomous_database_character_sets(self, response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + def post_list_autonomous_database_character_sets( + self, response: oracledatabase.ListAutonomousDatabaseCharacterSetsResponse + ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: """Post-rpc interceptor for list_autonomous_database_character_sets Override in a subclass to manipulate the response @@ -445,7 +554,14 @@ def post_list_autonomous_database_character_sets(self, response: oracledatabase. it is returned to user code. 
""" return response - def pre_list_autonomous_databases(self, request: oracledatabase.ListAutonomousDatabasesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDatabasesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_autonomous_databases( + self, + request: oracledatabase.ListAutonomousDatabasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDatabasesRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for list_autonomous_databases Override in a subclass to manipulate the request or metadata @@ -453,7 +569,9 @@ def pre_list_autonomous_databases(self, request: oracledatabase.ListAutonomousDa """ return request, metadata - def post_list_autonomous_databases(self, response: oracledatabase.ListAutonomousDatabasesResponse) -> oracledatabase.ListAutonomousDatabasesResponse: + def post_list_autonomous_databases( + self, response: oracledatabase.ListAutonomousDatabasesResponse + ) -> oracledatabase.ListAutonomousDatabasesResponse: """Post-rpc interceptor for list_autonomous_databases Override in a subclass to manipulate the response @@ -461,7 +579,14 @@ def post_list_autonomous_databases(self, response: oracledatabase.ListAutonomous it is returned to user code. 
""" return response - def pre_list_autonomous_db_versions(self, request: oracledatabase.ListAutonomousDbVersionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListAutonomousDbVersionsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_autonomous_db_versions( + self, + request: oracledatabase.ListAutonomousDbVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListAutonomousDbVersionsRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for list_autonomous_db_versions Override in a subclass to manipulate the request or metadata @@ -469,7 +594,9 @@ def pre_list_autonomous_db_versions(self, request: oracledatabase.ListAutonomous """ return request, metadata - def post_list_autonomous_db_versions(self, response: oracledatabase.ListAutonomousDbVersionsResponse) -> oracledatabase.ListAutonomousDbVersionsResponse: + def post_list_autonomous_db_versions( + self, response: oracledatabase.ListAutonomousDbVersionsResponse + ) -> oracledatabase.ListAutonomousDbVersionsResponse: """Post-rpc interceptor for list_autonomous_db_versions Override in a subclass to manipulate the response @@ -477,7 +604,14 @@ def post_list_autonomous_db_versions(self, response: oracledatabase.ListAutonomo it is returned to user code. 
""" return response - def pre_list_cloud_exadata_infrastructures(self, request: oracledatabase.ListCloudExadataInfrastructuresRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListCloudExadataInfrastructuresRequest, Sequence[Tuple[str, str]]]: + + def pre_list_cloud_exadata_infrastructures( + self, + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.ListCloudExadataInfrastructuresRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for list_cloud_exadata_infrastructures Override in a subclass to manipulate the request or metadata @@ -485,7 +619,9 @@ def pre_list_cloud_exadata_infrastructures(self, request: oracledatabase.ListClo """ return request, metadata - def post_list_cloud_exadata_infrastructures(self, response: oracledatabase.ListCloudExadataInfrastructuresResponse) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + def post_list_cloud_exadata_infrastructures( + self, response: oracledatabase.ListCloudExadataInfrastructuresResponse + ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: """Post-rpc interceptor for list_cloud_exadata_infrastructures Override in a subclass to manipulate the response @@ -493,7 +629,12 @@ def post_list_cloud_exadata_infrastructures(self, response: oracledatabase.ListC it is returned to user code. 
""" return response - def pre_list_cloud_vm_clusters(self, request: oracledatabase.ListCloudVmClustersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListCloudVmClustersRequest, Sequence[Tuple[str, str]]]: + + def pre_list_cloud_vm_clusters( + self, + request: oracledatabase.ListCloudVmClustersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListCloudVmClustersRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_cloud_vm_clusters Override in a subclass to manipulate the request or metadata @@ -501,7 +642,9 @@ def pre_list_cloud_vm_clusters(self, request: oracledatabase.ListCloudVmClusters """ return request, metadata - def post_list_cloud_vm_clusters(self, response: oracledatabase.ListCloudVmClustersResponse) -> oracledatabase.ListCloudVmClustersResponse: + def post_list_cloud_vm_clusters( + self, response: oracledatabase.ListCloudVmClustersResponse + ) -> oracledatabase.ListCloudVmClustersResponse: """Post-rpc interceptor for list_cloud_vm_clusters Override in a subclass to manipulate the response @@ -509,7 +652,12 @@ def post_list_cloud_vm_clusters(self, response: oracledatabase.ListCloudVmCluste it is returned to user code. 
""" return response - def pre_list_db_nodes(self, request: oracledatabase.ListDbNodesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListDbNodesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_db_nodes( + self, + request: oracledatabase.ListDbNodesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbNodesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_db_nodes Override in a subclass to manipulate the request or metadata @@ -517,7 +665,9 @@ def pre_list_db_nodes(self, request: oracledatabase.ListDbNodesRequest, metadata """ return request, metadata - def post_list_db_nodes(self, response: oracledatabase.ListDbNodesResponse) -> oracledatabase.ListDbNodesResponse: + def post_list_db_nodes( + self, response: oracledatabase.ListDbNodesResponse + ) -> oracledatabase.ListDbNodesResponse: """Post-rpc interceptor for list_db_nodes Override in a subclass to manipulate the response @@ -525,7 +675,12 @@ def post_list_db_nodes(self, response: oracledatabase.ListDbNodesResponse) -> or it is returned to user code. 
""" return response - def pre_list_db_servers(self, request: oracledatabase.ListDbServersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListDbServersRequest, Sequence[Tuple[str, str]]]: + + def pre_list_db_servers( + self, + request: oracledatabase.ListDbServersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbServersRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_db_servers Override in a subclass to manipulate the request or metadata @@ -533,7 +688,9 @@ def pre_list_db_servers(self, request: oracledatabase.ListDbServersRequest, meta """ return request, metadata - def post_list_db_servers(self, response: oracledatabase.ListDbServersResponse) -> oracledatabase.ListDbServersResponse: + def post_list_db_servers( + self, response: oracledatabase.ListDbServersResponse + ) -> oracledatabase.ListDbServersResponse: """Post-rpc interceptor for list_db_servers Override in a subclass to manipulate the response @@ -541,7 +698,12 @@ def post_list_db_servers(self, response: oracledatabase.ListDbServersResponse) - it is returned to user code. 
""" return response - def pre_list_db_system_shapes(self, request: oracledatabase.ListDbSystemShapesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListDbSystemShapesRequest, Sequence[Tuple[str, str]]]: + + def pre_list_db_system_shapes( + self, + request: oracledatabase.ListDbSystemShapesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListDbSystemShapesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_db_system_shapes Override in a subclass to manipulate the request or metadata @@ -549,7 +711,9 @@ def pre_list_db_system_shapes(self, request: oracledatabase.ListDbSystemShapesRe """ return request, metadata - def post_list_db_system_shapes(self, response: oracledatabase.ListDbSystemShapesResponse) -> oracledatabase.ListDbSystemShapesResponse: + def post_list_db_system_shapes( + self, response: oracledatabase.ListDbSystemShapesResponse + ) -> oracledatabase.ListDbSystemShapesResponse: """Post-rpc interceptor for list_db_system_shapes Override in a subclass to manipulate the response @@ -557,7 +721,12 @@ def post_list_db_system_shapes(self, response: oracledatabase.ListDbSystemShapes it is returned to user code. 
""" return response - def pre_list_entitlements(self, request: oracledatabase.ListEntitlementsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListEntitlementsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_entitlements( + self, + request: oracledatabase.ListEntitlementsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListEntitlementsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_entitlements Override in a subclass to manipulate the request or metadata @@ -565,7 +734,9 @@ def pre_list_entitlements(self, request: oracledatabase.ListEntitlementsRequest, """ return request, metadata - def post_list_entitlements(self, response: oracledatabase.ListEntitlementsResponse) -> oracledatabase.ListEntitlementsResponse: + def post_list_entitlements( + self, response: oracledatabase.ListEntitlementsResponse + ) -> oracledatabase.ListEntitlementsResponse: """Post-rpc interceptor for list_entitlements Override in a subclass to manipulate the response @@ -573,7 +744,12 @@ def post_list_entitlements(self, response: oracledatabase.ListEntitlementsRespon it is returned to user code. 
""" return response - def pre_list_gi_versions(self, request: oracledatabase.ListGiVersionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.ListGiVersionsRequest, Sequence[Tuple[str, str]]]: + + def pre_list_gi_versions( + self, + request: oracledatabase.ListGiVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[oracledatabase.ListGiVersionsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_gi_versions Override in a subclass to manipulate the request or metadata @@ -581,7 +757,9 @@ def pre_list_gi_versions(self, request: oracledatabase.ListGiVersionsRequest, me """ return request, metadata - def post_list_gi_versions(self, response: oracledatabase.ListGiVersionsResponse) -> oracledatabase.ListGiVersionsResponse: + def post_list_gi_versions( + self, response: oracledatabase.ListGiVersionsResponse + ) -> oracledatabase.ListGiVersionsResponse: """Post-rpc interceptor for list_gi_versions Override in a subclass to manipulate the response @@ -589,7 +767,14 @@ def post_list_gi_versions(self, response: oracledatabase.ListGiVersionsResponse) it is returned to user code. 
""" return response - def pre_restore_autonomous_database(self, request: oracledatabase.RestoreAutonomousDatabaseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[oracledatabase.RestoreAutonomousDatabaseRequest, Sequence[Tuple[str, str]]]: + + def pre_restore_autonomous_database( + self, + request: oracledatabase.RestoreAutonomousDatabaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + oracledatabase.RestoreAutonomousDatabaseRequest, Sequence[Tuple[str, str]] + ]: """Pre-rpc interceptor for restore_autonomous_database Override in a subclass to manipulate the request or metadata @@ -597,7 +782,9 @@ def pre_restore_autonomous_database(self, request: oracledatabase.RestoreAutonom """ return request, metadata - def post_restore_autonomous_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_restore_autonomous_database( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_autonomous_database Override in a subclass to manipulate the response @@ -607,7 +794,9 @@ def post_restore_autonomous_database(self, response: operations_pb2.Operation) - return response def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_location @@ -626,8 +815,11 @@ def post_get_location( it is returned to user code. """ return response + def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_locations @@ -646,8 +838,11 @@ def post_list_locations( it is returned to user code. 
""" return response + def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_operation @@ -656,9 +851,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -666,8 +859,11 @@ def post_cancel_operation( it is returned to user code. """ return response + def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation @@ -676,9 +872,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: None - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -686,8 +880,11 @@ def post_delete_operation( it is returned to user code. """ return response + def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation @@ -706,8 +903,11 @@ def post_get_operation( it is returned to user code. 
""" return response + def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations @@ -748,20 +948,21 @@ class OracleDatabaseRestTransport(OracleDatabaseTransport): """ - def __init__(self, *, - host: str = 'oracledatabase.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[OracleDatabaseRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "oracledatabase.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[OracleDatabaseRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -800,7 +1001,9 @@ def __init__(self, *, # credentials object maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -811,10 +1014,11 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -831,42 +1035,45 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. 
return self._operations_client @@ -875,77 +1082,88 @@ class _CreateAutonomousDatabase(OracleDatabaseRestStub): def __hash__(self): return hash("CreateAutonomousDatabase") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "autonomousDatabaseId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "autonomousDatabaseId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.CreateAutonomousDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create autonomous - database method over HTTP. - - Args: - request (~.oracledatabase.CreateAutonomousDatabaseRequest): - The request object. The request for ``AutonomousDatabase.Create``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + def __call__( + self, + request: oracledatabase.CreateAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.CreateAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Create``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabases', - 'body': 'autonomous_database', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabases", + "body": "autonomous_database", + }, ] - request, metadata = self._interceptor.pre_create_autonomous_database(request, metadata) + request, metadata = self._interceptor.pre_create_autonomous_database( + request, metadata + ) pb_request = oracledatabase.CreateAutonomousDatabaseRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( 
"{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -962,77 +1180,93 @@ class _CreateCloudExadataInfrastructure(OracleDatabaseRestStub): def __hash__(self): return hash("CreateCloudExadataInfrastructure") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "cloudExadataInfrastructureId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "cloudExadataInfrastructureId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.CreateCloudExadataInfrastructureRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create cloud exadata - infrastructure method over HTTP. - - Args: - request (~.oracledatabase.CreateCloudExadataInfrastructureRequest): - The request object. The request for ``CloudExadataInfrastructure.Create``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
+ def __call__( + self, + request: oracledatabase.CreateCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.CreateCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Create``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures', - 'body': 'cloud_exadata_infrastructure', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures", + "body": "cloud_exadata_infrastructure", + }, ] - request, metadata = self._interceptor.pre_create_cloud_exadata_infrastructure(request, metadata) - pb_request = oracledatabase.CreateCloudExadataInfrastructureRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_create_cloud_exadata_infrastructure( + request, metadata + ) + pb_request = oracledatabase.CreateCloudExadataInfrastructureRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = 
transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1049,19 +1283,26 @@ class _CreateCloudVmCluster(OracleDatabaseRestStub): def __hash__(self): return hash("CreateCloudVmCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "cloudVmClusterId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "cloudVmClusterId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.CreateCloudVmClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.CreateCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the create 
cloud vm cluster method over HTTP. Args: @@ -1081,44 +1322,48 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/cloudVmClusters', - 'body': 'cloud_vm_cluster', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/cloudVmClusters", + "body": "cloud_vm_cluster", + }, ] - request, metadata = self._interceptor.pre_create_cloud_vm_cluster(request, metadata) + request, metadata = self._interceptor.pre_create_cloud_vm_cluster( + request, metadata + ) pb_request = oracledatabase.CreateCloudVmClusterRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1135,69 +1380,79 @@ class _DeleteAutonomousDatabase(OracleDatabaseRestStub): def __hash__(self): return hash("DeleteAutonomousDatabase") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.DeleteAutonomousDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete autonomous - database method over HTTP. - - Args: - request (~.oracledatabase.DeleteAutonomousDatabaseRequest): - The request object. The request for ``AutonomousDatabase.Delete``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + def __call__( + self, + request: oracledatabase.DeleteAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.DeleteAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}", + }, ] - request, metadata = self._interceptor.pre_delete_autonomous_database(request, metadata) + request, metadata = self._interceptor.pre_delete_autonomous_database( + request, metadata + ) pb_request = oracledatabase.DeleteAutonomousDatabaseRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1214,69 +1469,84 @@ class _DeleteCloudExadataInfrastructure(OracleDatabaseRestStub): def __hash__(self): return hash("DeleteCloudExadataInfrastructure") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.DeleteCloudExadataInfrastructureRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete cloud exadata - infrastructure method over HTTP. - - Args: - request (~.oracledatabase.DeleteCloudExadataInfrastructureRequest): - The request object. The request for ``CloudExadataInfrastructure.Delete``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + def __call__( + self, + request: oracledatabase.DeleteCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.DeleteCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Delete``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}", + }, ] - request, metadata = self._interceptor.pre_delete_cloud_exadata_infrastructure(request, metadata) - pb_request = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_delete_cloud_exadata_infrastructure( + request, metadata + ) + pb_request = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception # subclass. @@ -1293,19 +1563,24 @@ class _DeleteCloudVmCluster(OracleDatabaseRestStub): def __hash__(self): return hash("DeleteCloudVmCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.DeleteCloudVmClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.DeleteCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the delete cloud vm cluster method over HTTP. 
Args: @@ -1325,36 +1600,41 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/cloudVmClusters/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/cloudVmClusters/*}", + }, ] - request, metadata = self._interceptor.pre_delete_cloud_vm_cluster(request, metadata) + request, metadata = self._interceptor.pre_delete_cloud_vm_cluster( + request, metadata + ) pb_request = oracledatabase.DeleteCloudVmClusterRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1371,74 +1651,88 @@ class _GenerateAutonomousDatabaseWallet(OracleDatabaseRestStub): def __hash__(self): return hash("GenerateAutonomousDatabaseWallet") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: - r"""Call the generate autonomous - database wallet method over HTTP. - - Args: - request (~.oracledatabase.GenerateAutonomousDatabaseWalletRequest): - The request object. The request for ``AutonomousDatabase.GenerateWallet``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.GenerateAutonomousDatabaseWalletResponse: - The response for ``AutonomousDatabase.GenerateWallet``. + def __call__( + self, + request: oracledatabase.GenerateAutonomousDatabaseWalletRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.GenerateAutonomousDatabaseWalletResponse: + r"""Call the generate autonomous + database wallet method over HTTP. + + Args: + request (~.oracledatabase.GenerateAutonomousDatabaseWalletRequest): + The request object. The request for ``AutonomousDatabase.GenerateWallet``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.GenerateAutonomousDatabaseWalletResponse: + The response for ``AutonomousDatabase.GenerateWallet``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_generate_autonomous_database_wallet(request, metadata) - pb_request = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_generate_autonomous_database_wallet( + request, metadata + ) + pb_request = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 
'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1457,19 +1751,24 @@ class _GetAutonomousDatabase(OracleDatabaseRestStub): def __hash__(self): return hash("GetAutonomousDatabase") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.GetAutonomousDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> autonomous_database.AutonomousDatabase: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GetAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autonomous_database.AutonomousDatabase: r"""Call the get autonomous database method over HTTP. 
Args: @@ -1489,36 +1788,41 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}", + }, ] - request, metadata = self._interceptor.pre_get_autonomous_database(request, metadata) + request, metadata = self._interceptor.pre_get_autonomous_database( + request, metadata + ) pb_request = oracledatabase.GetAutonomousDatabaseRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1537,69 +1841,79 @@ class _GetCloudExadataInfrastructure(OracleDatabaseRestStub): def __hash__(self): return hash("GetCloudExadataInfrastructure") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.GetCloudExadataInfrastructureRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> exadata_infra.CloudExadataInfrastructure: - r"""Call the get cloud exadata - infrastructure method over HTTP. - - Args: - request (~.oracledatabase.GetCloudExadataInfrastructureRequest): - The request object. The request for ``CloudExadataInfrastructure.Get``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.exadata_infra.CloudExadataInfrastructure: - Represents CloudExadataInfrastructure - resource. - https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ + def __call__( + self, + request: oracledatabase.GetCloudExadataInfrastructureRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> exadata_infra.CloudExadataInfrastructure: + r"""Call the get cloud exadata + infrastructure method over HTTP. + + Args: + request (~.oracledatabase.GetCloudExadataInfrastructureRequest): + The request object. The request for ``CloudExadataInfrastructure.Get``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.exadata_infra.CloudExadataInfrastructure: + Represents CloudExadataInfrastructure + resource. + https://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/CloudExadataInfrastructure/ """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}", + }, ] - request, metadata = self._interceptor.pre_get_cloud_exadata_infrastructure(request, metadata) + request, metadata = self._interceptor.pre_get_cloud_exadata_infrastructure( + request, metadata + ) pb_request = oracledatabase.GetCloudExadataInfrastructureRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case 
of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1618,19 +1932,24 @@ class _GetCloudVmCluster(OracleDatabaseRestStub): def __hash__(self): return hash("GetCloudVmCluster") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.GetCloudVmClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> vm_cluster.CloudVmCluster: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.GetCloudVmClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vm_cluster.CloudVmCluster: r"""Call the get cloud vm cluster method over HTTP. 
Args: @@ -1650,36 +1969,41 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/cloudVmClusters/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/cloudVmClusters/*}", + }, ] - request, metadata = self._interceptor.pre_get_cloud_vm_cluster(request, metadata) + request, metadata = self._interceptor.pre_get_cloud_vm_cluster( + request, metadata + ) pb_request = oracledatabase.GetCloudVmClusterRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1698,66 +2022,76 @@ class _ListAutonomousDatabaseBackups(OracleDatabaseRestStub): def __hash__(self): return hash("ListAutonomousDatabaseBackups") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListAutonomousDatabaseBackupsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: - r"""Call the list autonomous database - backups method over HTTP. - - Args: - request (~.oracledatabase.ListAutonomousDatabaseBackupsRequest): - The request object. The request for ``AutonomousDatabaseBackup.List``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.ListAutonomousDatabaseBackupsResponse: - The response for ``AutonomousDatabaseBackup.List``. + def __call__( + self, + request: oracledatabase.ListAutonomousDatabaseBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabaseBackupsResponse: + r"""Call the list autonomous database + backups method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseBackupsRequest): + The request object. The request for ``AutonomousDatabaseBackup.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseBackupsResponse: + The response for ``AutonomousDatabaseBackup.List``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups", + }, ] - request, metadata = self._interceptor.pre_list_autonomous_database_backups(request, metadata) + request, metadata = self._interceptor.pre_list_autonomous_database_backups( + request, metadata + ) pb_request = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1776,68 +2110,83 @@ class _ListAutonomousDatabaseCharacterSets(OracleDatabaseRestStub): def __hash__(self): return hash("ListAutonomousDatabaseCharacterSets") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: - r"""Call the list autonomous database - character sets method over HTTP. - - Args: - request (~.oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): - The request object. The request for ``AutonomousDatabaseCharacterSet.List``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: - The response for - ``AutonomousDatabaseCharacterSet.List``. + def __call__( + self, + request: oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + r"""Call the list autonomous database + character sets method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): + The request object. The request for ``AutonomousDatabaseCharacterSet.List``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDatabaseCharacterSetsResponse: + The response for + ``AutonomousDatabaseCharacterSet.List``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets", + }, ] - request, metadata = self._interceptor.pre_list_autonomous_database_character_sets(request, metadata) - pb_request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_list_autonomous_database_character_sets( + request, metadata + ) + pb_request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, 
params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1846,7 +2195,9 @@ def __call__(self, # Return the response resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() - pb_resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(resp) + pb_resp = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + resp + ) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_autonomous_database_character_sets(resp) @@ -1856,19 +2207,24 @@ class _ListAutonomousDatabases(OracleDatabaseRestStub): def __hash__(self): return hash("ListAutonomousDatabases") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListAutonomousDatabasesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListAutonomousDatabasesResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListAutonomousDatabasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDatabasesResponse: r"""Call the list autonomous databases method over HTTP. Args: @@ -1885,36 +2241,41 @@ def __call__(self, The response for ``AutonomousDatabase.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDatabases', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDatabases", + }, ] - request, metadata = self._interceptor.pre_list_autonomous_databases(request, metadata) + request, metadata = self._interceptor.pre_list_autonomous_databases( + request, metadata + ) pb_request = oracledatabase.ListAutonomousDatabasesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1933,66 +2294,76 @@ class _ListAutonomousDbVersions(OracleDatabaseRestStub): def __hash__(self): return hash("ListAutonomousDbVersions") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListAutonomousDbVersionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListAutonomousDbVersionsResponse: - r"""Call the list autonomous db - versions method over HTTP. - - Args: - request (~.oracledatabase.ListAutonomousDbVersionsRequest): - The request object. The request for ``AutonomousDbVersion.List``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.ListAutonomousDbVersionsResponse: - The response for ``AutonomousDbVersion.List``. + def __call__( + self, + request: oracledatabase.ListAutonomousDbVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListAutonomousDbVersionsResponse: + r"""Call the list autonomous db + versions method over HTTP. + + Args: + request (~.oracledatabase.ListAutonomousDbVersionsRequest): + The request object. The request for ``AutonomousDbVersion.List``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListAutonomousDbVersionsResponse: + The response for ``AutonomousDbVersion.List``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/autonomousDbVersions', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autonomousDbVersions", + }, ] - request, metadata = self._interceptor.pre_list_autonomous_db_versions(request, metadata) + request, metadata = self._interceptor.pre_list_autonomous_db_versions( + request, metadata + ) pb_request = oracledatabase.ListAutonomousDbVersionsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2011,66 +2382,81 @@ class _ListCloudExadataInfrastructures(OracleDatabaseRestStub): def __hash__(self): return hash("ListCloudExadataInfrastructures") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListCloudExadataInfrastructuresRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: - r"""Call the list cloud exadata - infrastructures method over HTTP. - - Args: - request (~.oracledatabase.ListCloudExadataInfrastructuresRequest): - The request object. The request for ``CloudExadataInfrastructures.List``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.oracledatabase.ListCloudExadataInfrastructuresResponse: - The response for ``CloudExadataInfrastructures.list``. + def __call__( + self, + request: oracledatabase.ListCloudExadataInfrastructuresRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListCloudExadataInfrastructuresResponse: + r"""Call the list cloud exadata + infrastructures method over HTTP. + + Args: + request (~.oracledatabase.ListCloudExadataInfrastructuresRequest): + The request object. The request for ``CloudExadataInfrastructures.List``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.oracledatabase.ListCloudExadataInfrastructuresResponse: + The response for ``CloudExadataInfrastructures.list``. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures", + }, ] - request, metadata = self._interceptor.pre_list_cloud_exadata_infrastructures(request, metadata) - pb_request = oracledatabase.ListCloudExadataInfrastructuresRequest.pb(request) + ( + request, + metadata, + ) = self._interceptor.pre_list_cloud_exadata_infrastructures( + request, metadata + ) + pb_request = oracledatabase.ListCloudExadataInfrastructuresRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, 
strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2089,19 +2475,24 @@ class _ListCloudVmClusters(OracleDatabaseRestStub): def __hash__(self): return hash("ListCloudVmClusters") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListCloudVmClustersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListCloudVmClustersResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListCloudVmClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListCloudVmClustersResponse: r"""Call the list cloud vm clusters method over HTTP. Args: @@ -2118,36 +2509,41 @@ def __call__(self, The response for ``CloudVmCluster.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/cloudVmClusters', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudVmClusters", + }, ] - request, metadata = self._interceptor.pre_list_cloud_vm_clusters(request, metadata) + request, metadata = self._interceptor.pre_list_cloud_vm_clusters( + request, metadata + ) pb_request = oracledatabase.ListCloudVmClustersRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2166,19 +2562,24 @@ class _ListDbNodes(OracleDatabaseRestStub): def __hash__(self): return hash("ListDbNodes") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListDbNodesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListDbNodesResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbNodesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbNodesResponse: r"""Call the list db nodes method over HTTP. Args: @@ -2195,36 +2596,39 @@ def __call__(self, The response for ``DbNode.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes", + }, ] request, metadata = self._interceptor.pre_list_db_nodes(request, metadata) pb_request = oracledatabase.ListDbNodesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2243,19 +2647,24 @@ class _ListDbServers(OracleDatabaseRestStub): def __hash__(self): return hash("ListDbServers") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListDbServersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListDbServersResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbServersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbServersResponse: r"""Call the list db servers method over HTTP. Args: @@ -2272,36 +2681,39 @@ def __call__(self, The response for ``DbServer.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers", + }, ] request, metadata = self._interceptor.pre_list_db_servers(request, metadata) pb_request = oracledatabase.ListDbServersRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2320,19 +2732,24 @@ class _ListDbSystemShapes(OracleDatabaseRestStub): def __hash__(self): return hash("ListDbSystemShapes") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListDbSystemShapesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListDbSystemShapesResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListDbSystemShapesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListDbSystemShapesResponse: r"""Call the list db system shapes method over HTTP. Args: @@ -2349,36 +2766,41 @@ def __call__(self, The response for ``DbSystemShape.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/dbSystemShapes', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dbSystemShapes", + }, ] - request, metadata = self._interceptor.pre_list_db_system_shapes(request, metadata) + request, metadata = self._interceptor.pre_list_db_system_shapes( + request, metadata + ) pb_request = oracledatabase.ListDbSystemShapesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2397,19 +2819,24 @@ class _ListEntitlements(OracleDatabaseRestStub): def __hash__(self): return hash("ListEntitlements") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListEntitlementsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListEntitlementsResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListEntitlementsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListEntitlementsResponse: r"""Call the list entitlements method over HTTP. Args: @@ -2426,36 +2853,41 @@ def __call__(self, The response for ``Entitlement.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/entitlements', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/entitlements", + }, ] - request, metadata = self._interceptor.pre_list_entitlements(request, metadata) + request, metadata = self._interceptor.pre_list_entitlements( + request, metadata + ) pb_request = oracledatabase.ListEntitlementsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2474,19 +2906,24 @@ class _ListGiVersions(OracleDatabaseRestStub): def __hash__(self): return hash("ListGiVersions") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.ListGiVersionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> oracledatabase.ListGiVersionsResponse: + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: oracledatabase.ListGiVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> oracledatabase.ListGiVersionsResponse: r"""Call the list gi versions method over HTTP. Args: @@ -2503,36 +2940,41 @@ def __call__(self, The response for ``GiVersion.List``. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/giVersions', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/giVersions", + }, ] - request, metadata = self._interceptor.pre_list_gi_versions(request, metadata) + request, metadata = self._interceptor.pre_list_gi_versions( + request, metadata + ) pb_request = oracledatabase.ListGiVersionsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2551,77 +2993,86 @@ class _RestoreAutonomousDatabase(OracleDatabaseRestStub): def __hash__(self): return hash("RestoreAutonomousDatabase") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: oracledatabase.RestoreAutonomousDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the restore autonomous - database method over HTTP. - - Args: - request (~.oracledatabase.RestoreAutonomousDatabaseRequest): - The request object. The request for ``AutonomousDatabase.Restore``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + def __call__( + self, + request: oracledatabase.RestoreAutonomousDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore autonomous + database method over HTTP. + + Args: + request (~.oracledatabase.RestoreAutonomousDatabaseRequest): + The request object. The request for ``AutonomousDatabase.Restore``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_restore_autonomous_database(request, metadata) + request, metadata = self._interceptor.pre_restore_autonomous_database( + request, metadata + ) pb_request = oracledatabase.RestoreAutonomousDatabaseRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) # In 
case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2635,193 +3086,249 @@ def __call__(self, return resp @property - def create_autonomous_database(self) -> Callable[ - [oracledatabase.CreateAutonomousDatabaseRequest], - operations_pb2.Operation]: + def create_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.CreateAutonomousDatabaseRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + return self._CreateAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore @property - def create_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.CreateCloudExadataInfrastructureRequest], - operations_pb2.Operation]: + def create_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.CreateCloudExadataInfrastructureRequest], + operations_pb2.Operation, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + return self._CreateCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore @property - def create_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.CreateCloudVmClusterRequest], - operations_pb2.Operation]: + def create_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.CreateCloudVmClusterRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + return self._CreateCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore @property - def delete_autonomous_database(self) -> Callable[ - [oracledatabase.DeleteAutonomousDatabaseRequest], - operations_pb2.Operation]: + def delete_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.DeleteAutonomousDatabaseRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore @property - def delete_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.DeleteCloudExadataInfrastructureRequest], - operations_pb2.Operation]: + def delete_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudExadataInfrastructureRequest], + operations_pb2.Operation, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore @property - def delete_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.DeleteCloudVmClusterRequest], - operations_pb2.Operation]: + def delete_cloud_vm_cluster( + self, + ) -> Callable[ + [oracledatabase.DeleteCloudVmClusterRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._DeleteCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore @property - def generate_autonomous_database_wallet(self) -> Callable[ - [oracledatabase.GenerateAutonomousDatabaseWalletRequest], - oracledatabase.GenerateAutonomousDatabaseWalletResponse]: + def generate_autonomous_database_wallet( + self, + ) -> Callable[ + [oracledatabase.GenerateAutonomousDatabaseWalletRequest], + oracledatabase.GenerateAutonomousDatabaseWalletResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GenerateAutonomousDatabaseWallet(self._session, self._host, self._interceptor) # type: ignore + return self._GenerateAutonomousDatabaseWallet(self._session, self._host, self._interceptor) # type: ignore @property - def get_autonomous_database(self) -> Callable[ - [oracledatabase.GetAutonomousDatabaseRequest], - autonomous_database.AutonomousDatabase]: + def get_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.GetAutonomousDatabaseRequest], + autonomous_database.AutonomousDatabase, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + return self._GetAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore @property - def get_cloud_exadata_infrastructure(self) -> Callable[ - [oracledatabase.GetCloudExadataInfrastructureRequest], - exadata_infra.CloudExadataInfrastructure]: + def get_cloud_exadata_infrastructure( + self, + ) -> Callable[ + [oracledatabase.GetCloudExadataInfrastructureRequest], + exadata_infra.CloudExadataInfrastructure, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore + return self._GetCloudExadataInfrastructure(self._session, self._host, self._interceptor) # type: ignore @property - def get_cloud_vm_cluster(self) -> Callable[ - [oracledatabase.GetCloudVmClusterRequest], - vm_cluster.CloudVmCluster]: + def get_cloud_vm_cluster( + self, + ) -> Callable[[oracledatabase.GetCloudVmClusterRequest], vm_cluster.CloudVmCluster]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore + return self._GetCloudVmCluster(self._session, self._host, self._interceptor) # type: ignore @property - def list_autonomous_database_backups(self) -> Callable[ - [oracledatabase.ListAutonomousDatabaseBackupsRequest], - oracledatabase.ListAutonomousDatabaseBackupsResponse]: + def list_autonomous_database_backups( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseBackupsRequest], + oracledatabase.ListAutonomousDatabaseBackupsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListAutonomousDatabaseBackups(self._session, self._host, self._interceptor) # type: ignore + return self._ListAutonomousDatabaseBackups(self._session, self._host, self._interceptor) # type: ignore @property - def list_autonomous_database_character_sets(self) -> Callable[ - [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], - oracledatabase.ListAutonomousDatabaseCharacterSetsResponse]: + def list_autonomous_database_character_sets( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabaseCharacterSetsRequest], + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAutonomousDatabaseCharacterSets(self._session, self._host, self._interceptor) # type: ignore + return self._ListAutonomousDatabaseCharacterSets(self._session, self._host, self._interceptor) # type: ignore @property - def list_autonomous_databases(self) -> Callable[ - [oracledatabase.ListAutonomousDatabasesRequest], - oracledatabase.ListAutonomousDatabasesResponse]: + def list_autonomous_databases( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDatabasesRequest], + oracledatabase.ListAutonomousDatabasesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListAutonomousDatabases(self._session, self._host, self._interceptor) # type: ignore + return self._ListAutonomousDatabases(self._session, self._host, self._interceptor) # type: ignore @property - def list_autonomous_db_versions(self) -> Callable[ - [oracledatabase.ListAutonomousDbVersionsRequest], - oracledatabase.ListAutonomousDbVersionsResponse]: + def list_autonomous_db_versions( + self, + ) -> Callable[ + [oracledatabase.ListAutonomousDbVersionsRequest], + oracledatabase.ListAutonomousDbVersionsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAutonomousDbVersions(self._session, self._host, self._interceptor) # type: ignore + return self._ListAutonomousDbVersions(self._session, self._host, self._interceptor) # type: ignore @property - def list_cloud_exadata_infrastructures(self) -> Callable[ - [oracledatabase.ListCloudExadataInfrastructuresRequest], - oracledatabase.ListCloudExadataInfrastructuresResponse]: + def list_cloud_exadata_infrastructures( + self, + ) -> Callable[ + [oracledatabase.ListCloudExadataInfrastructuresRequest], + oracledatabase.ListCloudExadataInfrastructuresResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListCloudExadataInfrastructures(self._session, self._host, self._interceptor) # type: ignore + return self._ListCloudExadataInfrastructures(self._session, self._host, self._interceptor) # type: ignore @property - def list_cloud_vm_clusters(self) -> Callable[ - [oracledatabase.ListCloudVmClustersRequest], - oracledatabase.ListCloudVmClustersResponse]: + def list_cloud_vm_clusters( + self, + ) -> Callable[ + [oracledatabase.ListCloudVmClustersRequest], + oracledatabase.ListCloudVmClustersResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListCloudVmClusters(self._session, self._host, self._interceptor) # type: ignore + return self._ListCloudVmClusters(self._session, self._host, self._interceptor) # type: ignore @property - def list_db_nodes(self) -> Callable[ - [oracledatabase.ListDbNodesRequest], - oracledatabase.ListDbNodesResponse]: + def list_db_nodes( + self, + ) -> Callable[ + [oracledatabase.ListDbNodesRequest], oracledatabase.ListDbNodesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListDbNodes(self._session, self._host, self._interceptor) # type: ignore + return self._ListDbNodes(self._session, self._host, self._interceptor) # type: ignore @property - def list_db_servers(self) -> Callable[ - [oracledatabase.ListDbServersRequest], - oracledatabase.ListDbServersResponse]: + def list_db_servers( + self, + ) -> Callable[ + [oracledatabase.ListDbServersRequest], oracledatabase.ListDbServersResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListDbServers(self._session, self._host, self._interceptor) # type: ignore + return self._ListDbServers(self._session, self._host, self._interceptor) # type: ignore @property - def list_db_system_shapes(self) -> Callable[ - [oracledatabase.ListDbSystemShapesRequest], - oracledatabase.ListDbSystemShapesResponse]: + def list_db_system_shapes( + self, + ) -> Callable[ + [oracledatabase.ListDbSystemShapesRequest], + oracledatabase.ListDbSystemShapesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListDbSystemShapes(self._session, self._host, self._interceptor) # type: ignore + return self._ListDbSystemShapes(self._session, self._host, self._interceptor) # type: ignore @property - def list_entitlements(self) -> Callable[ - [oracledatabase.ListEntitlementsRequest], - oracledatabase.ListEntitlementsResponse]: + def list_entitlements( + self, + ) -> Callable[ + [oracledatabase.ListEntitlementsRequest], + oracledatabase.ListEntitlementsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListEntitlements(self._session, self._host, self._interceptor) # type: ignore + return self._ListEntitlements(self._session, self._host, self._interceptor) # type: ignore @property - def list_gi_versions(self) -> Callable[ - [oracledatabase.ListGiVersionsRequest], - oracledatabase.ListGiVersionsResponse]: + def list_gi_versions( + self, + ) -> Callable[ + [oracledatabase.ListGiVersionsRequest], oracledatabase.ListGiVersionsResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListGiVersions(self._session, self._host, self._interceptor) # type: ignore + return self._ListGiVersions(self._session, self._host, self._interceptor) # type: ignore @property - def restore_autonomous_database(self) -> Callable[ - [oracledatabase.RestoreAutonomousDatabaseRequest], - operations_pb2.Operation]: + def restore_autonomous_database( + self, + ) -> Callable[ + [oracledatabase.RestoreAutonomousDatabaseRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._RestoreAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore + return self._RestoreAutonomousDatabase(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(OracleDatabaseRestStub): - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -2837,26 +3344,26 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] request, metadata = self._interceptor.pre_get_location(request, metadata) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -2877,16 +3384,17 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(OracleDatabaseRestStub): - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. 
Args: @@ -2902,26 +3410,26 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] request, metadata = self._interceptor.pre_list_locations(request, metadata) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -2942,16 +3450,17 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(OracleDatabaseRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. 
Args: @@ -2964,28 +3473,30 @@ def __call__(self, sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -3004,16 +3515,17 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(OracleDatabaseRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: 
operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -3026,26 +3538,28 @@ def __call__(self, sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] - request, metadata = self._interceptor.pre_delete_operation(request, metadata) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -3063,16 +3577,17 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(OracleDatabaseRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -3088,26 +3603,26 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -3128,16 +3643,17 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(OracleDatabaseRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - 
timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -3153,26 +3669,26 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] request, metadata = self._interceptor.pre_list_operations(request, metadata) request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + uri = transcoded_request["uri"] + method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) # Send the request headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), @@ -3199,6 +3715,4 @@ def close(self): self._session.close() -__all__=( - 'OracleDatabaseRestTransport', -) +__all__ = ("OracleDatabaseRestTransport",) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py new file mode 
100644 index 000000000000..e5079e7c48c9 --- /dev/null +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/__init__.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .autonomous_database import ( + AllConnectionStrings, + AutonomousDatabase, + AutonomousDatabaseApex, + AutonomousDatabaseConnectionStrings, + AutonomousDatabaseConnectionUrls, + AutonomousDatabaseProperties, + AutonomousDatabaseStandbySummary, + DatabaseConnectionStringProfile, + DBWorkload, + GenerateType, + OperationsInsightsState, + ScheduledOperationDetails, + State, +) +from .autonomous_database_character_set import AutonomousDatabaseCharacterSet +from .autonomous_db_backup import ( + AutonomousDatabaseBackup, + AutonomousDatabaseBackupProperties, +) +from .autonomous_db_version import AutonomousDbVersion +from .common import CustomerContact +from .db_node import DbNode, DbNodeProperties +from .db_server import DbServer, DbServerProperties +from .db_system_shape import DbSystemShape +from .entitlement import CloudAccountDetails, Entitlement +from .exadata_infra import ( + CloudExadataInfrastructure, + CloudExadataInfrastructureProperties, + MaintenanceWindow, +) +from .gi_version import GiVersion +from .location_metadata import LocationMetadata +from .oracledatabase import ( + CreateAutonomousDatabaseRequest, + CreateCloudExadataInfrastructureRequest, + CreateCloudVmClusterRequest, + 
DeleteAutonomousDatabaseRequest, + DeleteCloudExadataInfrastructureRequest, + DeleteCloudVmClusterRequest, + GenerateAutonomousDatabaseWalletRequest, + GenerateAutonomousDatabaseWalletResponse, + GetAutonomousDatabaseRequest, + GetCloudExadataInfrastructureRequest, + GetCloudVmClusterRequest, + ListAutonomousDatabaseBackupsRequest, + ListAutonomousDatabaseBackupsResponse, + ListAutonomousDatabaseCharacterSetsRequest, + ListAutonomousDatabaseCharacterSetsResponse, + ListAutonomousDatabasesRequest, + ListAutonomousDatabasesResponse, + ListAutonomousDbVersionsRequest, + ListAutonomousDbVersionsResponse, + ListCloudExadataInfrastructuresRequest, + ListCloudExadataInfrastructuresResponse, + ListCloudVmClustersRequest, + ListCloudVmClustersResponse, + ListDbNodesRequest, + ListDbNodesResponse, + ListDbServersRequest, + ListDbServersResponse, + ListDbSystemShapesRequest, + ListDbSystemShapesResponse, + ListEntitlementsRequest, + ListEntitlementsResponse, + ListGiVersionsRequest, + ListGiVersionsResponse, + OperationMetadata, + RestoreAutonomousDatabaseRequest, +) +from .vm_cluster import CloudVmCluster, CloudVmClusterProperties, DataCollectionOptions + +__all__ = ( + "AllConnectionStrings", + "AutonomousDatabase", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseProperties", + "AutonomousDatabaseStandbySummary", + "DatabaseConnectionStringProfile", + "ScheduledOperationDetails", + "DBWorkload", + "GenerateType", + "OperationsInsightsState", + "State", + "AutonomousDatabaseCharacterSet", + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", + "AutonomousDbVersion", + "CustomerContact", + "DbNode", + "DbNodeProperties", + "DbServer", + "DbServerProperties", + "DbSystemShape", + "CloudAccountDetails", + "Entitlement", + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", + "GiVersion", + "LocationMetadata", + 
"CreateAutonomousDatabaseRequest", + "CreateCloudExadataInfrastructureRequest", + "CreateCloudVmClusterRequest", + "DeleteAutonomousDatabaseRequest", + "DeleteCloudExadataInfrastructureRequest", + "DeleteCloudVmClusterRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "GetAutonomousDatabaseRequest", + "GetCloudExadataInfrastructureRequest", + "GetCloudVmClusterRequest", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabasesRequest", + "ListAutonomousDatabasesResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "OperationMetadata", + "RestoreAutonomousDatabaseRequest", + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", +) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py index f1e5c317c8ac..907ef93bc4d5 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database.py +++ 
b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database.py @@ -17,31 +17,30 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.oracledatabase_v1.types import common from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore +from google.cloud.oracledatabase_v1.types import common __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'GenerateType', - 'State', - 'OperationsInsightsState', - 'DBWorkload', - 'AutonomousDatabase', - 'AutonomousDatabaseProperties', - 'AutonomousDatabaseApex', - 'AutonomousDatabaseConnectionStrings', - 'DatabaseConnectionStringProfile', - 'AllConnectionStrings', - 'AutonomousDatabaseConnectionUrls', - 'AutonomousDatabaseStandbySummary', - 'ScheduledOperationDetails', + "GenerateType", + "State", + "OperationsInsightsState", + "DBWorkload", + "AutonomousDatabase", + "AutonomousDatabaseProperties", + "AutonomousDatabaseApex", + "AutonomousDatabaseConnectionStrings", + "DatabaseConnectionStringProfile", + "AllConnectionStrings", + "AutonomousDatabaseConnectionUrls", + "AutonomousDatabaseStandbySummary", + "ScheduledOperationDetails", }, ) @@ -277,10 +276,10 @@ class AutonomousDatabase(proto.Message): proto.STRING, number=6, ) - properties: 'AutonomousDatabaseProperties' = proto.Field( + properties: "AutonomousDatabaseProperties" = proto.Field( proto.MESSAGE, number=7, - message='AutonomousDatabaseProperties', + message="AutonomousDatabaseProperties", ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -506,6 +505,7 @@ class AutonomousDatabaseProperties(proto.Message): Output only. The date and time when maintenance will end. 
""" + class DatabaseEdition(proto.Enum): r"""The editions available for the Autonomous Database. @@ -733,10 +733,10 @@ class Role(proto.Enum): proto.INT32, number=63, ) - db_workload: 'DBWorkload' = proto.Field( + db_workload: "DBWorkload" = proto.Field( proto.ENUM, number=5, - enum='DBWorkload', + enum="DBWorkload", ) db_edition: DatabaseEdition = proto.Field( proto.ENUM, @@ -810,10 +810,10 @@ class Role(proto.Enum): proto.DOUBLE, number=22, ) - apex_details: 'AutonomousDatabaseApex' = proto.Field( + apex_details: "AutonomousDatabaseApex" = proto.Field( proto.MESSAGE, number=23, - message='AutonomousDatabaseApex', + message="AutonomousDatabaseApex", ) are_primary_allowlisted_ips_used: bool = proto.Field( proto.BOOL, @@ -824,10 +824,10 @@ class Role(proto.Enum): proto.STRING, number=25, ) - state: 'State' = proto.Field( + state: "State" = proto.Field( proto.ENUM, number=26, - enum='State', + enum="State", ) autonomous_container_database_id: str = proto.Field( proto.STRING, @@ -837,15 +837,15 @@ class Role(proto.Enum): proto.STRING, number=28, ) - connection_strings: 'AutonomousDatabaseConnectionStrings' = proto.Field( + connection_strings: "AutonomousDatabaseConnectionStrings" = proto.Field( proto.MESSAGE, number=29, - message='AutonomousDatabaseConnectionStrings', + message="AutonomousDatabaseConnectionStrings", ) - connection_urls: 'AutonomousDatabaseConnectionUrls' = proto.Field( + connection_urls: "AutonomousDatabaseConnectionUrls" = proto.Field( proto.MESSAGE, number=30, - message='AutonomousDatabaseConnectionUrls', + message="AutonomousDatabaseConnectionUrls", ) failed_data_recovery_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, @@ -864,10 +864,10 @@ class Role(proto.Enum): proto.INT32, number=35, ) - local_standby_db: 'AutonomousDatabaseStandbySummary' = proto.Field( + local_standby_db: "AutonomousDatabaseStandbySummary" = proto.Field( proto.MESSAGE, number=36, - message='AutonomousDatabaseStandbySummary', + 
message="AutonomousDatabaseStandbySummary", ) memory_per_oracle_compute_unit_gbs: int = proto.Field( proto.INT32, @@ -893,10 +893,10 @@ class Role(proto.Enum): number=41, enum=OpenMode, ) - operations_insights_state: 'OperationsInsightsState' = proto.Field( + operations_insights_state: "OperationsInsightsState" = proto.Field( proto.ENUM, number=42, - enum='OperationsInsightsState', + enum="OperationsInsightsState", ) peer_db_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -926,10 +926,12 @@ class Role(proto.Enum): number=48, enum=Role, ) - scheduled_operation_details: MutableSequence['ScheduledOperationDetails'] = proto.RepeatedField( + scheduled_operation_details: MutableSequence[ + "ScheduledOperationDetails" + ] = proto.RepeatedField( proto.MESSAGE, number=64, - message='ScheduledOperationDetails', + message="ScheduledOperationDetails", ) sql_web_developer_url: str = proto.Field( proto.STRING, @@ -1023,10 +1025,10 @@ class AutonomousDatabaseConnectionStrings(proto.Message): select values based on the structured metadata. """ - all_connection_strings: 'AllConnectionStrings' = proto.Field( + all_connection_strings: "AllConnectionStrings" = proto.Field( proto.MESSAGE, number=1, - message='AllConnectionStrings', + message="AllConnectionStrings", ) dedicated: str = proto.Field( proto.STRING, @@ -1044,10 +1046,10 @@ class AutonomousDatabaseConnectionStrings(proto.Message): proto.STRING, number=5, ) - profiles: MutableSequence['DatabaseConnectionStringProfile'] = proto.RepeatedField( + profiles: MutableSequence["DatabaseConnectionStringProfile"] = proto.RepeatedField( proto.MESSAGE, number=6, - message='DatabaseConnectionStringProfile', + message="DatabaseConnectionStringProfile", ) @@ -1085,6 +1087,7 @@ class DatabaseConnectionStringProfile(proto.Message): Output only. The value of the connection string. """ + class ConsumerGroup(proto.Enum): r"""The various consumer groups available in the connection string profile. 
@@ -1368,10 +1371,10 @@ class AutonomousDatabaseStandbySummary(proto.Message): proto.STRING, number=2, ) - state: 'State' = proto.Field( + state: "State" = proto.Field( proto.ENUM, number=3, - enum='State', + enum="State", ) data_guard_role_changed_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py index dacd4b6dc95b..dd6bfd509fce 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_database_character_set.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'AutonomousDatabaseCharacterSet', + "AutonomousDatabaseCharacterSet", }, ) @@ -45,6 +44,7 @@ class AutonomousDatabaseCharacterSet(proto.Message): Autonomous Database which is the ID in the resource name. """ + class CharacterSetType(proto.Enum): r"""The type of character set an Autonomous Database can have. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py similarity index 97% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py index c7f900cfb859..1f15eb50e02a 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_backup.py @@ -17,16 +17,14 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'AutonomousDatabaseBackup', - 'AutonomousDatabaseBackupProperties', + "AutonomousDatabaseBackup", + "AutonomousDatabaseBackupProperties", }, ) @@ -66,10 +64,10 @@ class AutonomousDatabaseBackup(proto.Message): proto.STRING, number=3, ) - properties: 'AutonomousDatabaseBackupProperties' = proto.Field( + properties: "AutonomousDatabaseBackupProperties" = proto.Field( proto.MESSAGE, number=4, - message='AutonomousDatabaseBackupProperties', + message="AutonomousDatabaseBackupProperties", ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -148,6 +146,7 @@ class AutonomousDatabaseBackupProperties(proto.Message): vault_id (str): Optional. The OCID of the vault. """ + class State(proto.Enum): r"""// The various lifecycle states of the Autonomous Database Backup. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py index f6773072c559..05189694df98 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/autonomous_db_version.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/autonomous_db_version.py @@ -21,11 +21,10 @@ from google.cloud.oracledatabase_v1.types import autonomous_database - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'AutonomousDbVersion', + "AutonomousDbVersion", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py similarity index 94% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py index 54ef447a9f93..2357b454221c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/common.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/common.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'CustomerContact', + "CustomerContact", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py 
b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py index 499c37d17305..4f0a7175908f 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_node.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_node.py @@ -19,12 +19,11 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'DbNode', - 'DbNodeProperties', + "DbNode", + "DbNodeProperties", }, ) @@ -47,10 +46,10 @@ class DbNode(proto.Message): proto.STRING, number=1, ) - properties: 'DbNodeProperties' = proto.Field( + properties: "DbNodeProperties" = proto.Field( proto.MESSAGE, number=3, - message='DbNodeProperties', + message="DbNodeProperties", ) @@ -75,6 +74,7 @@ class DbNodeProperties(proto.Message): total_cpu_core_count (int): Total CPU core count of the database node. """ + class State(proto.Enum): r"""The various lifecycle states of the database node. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py similarity index 96% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py index f7b0a1cfc988..ac60975560d4 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_server.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_server.py @@ -19,12 +19,11 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'DbServer', - 'DbServerProperties', + "DbServer", + "DbServerProperties", }, ) @@ -54,10 +53,10 @@ class DbServer(proto.Message): proto.STRING, number=2, ) - properties: 'DbServerProperties' = proto.Field( + properties: "DbServerProperties" = proto.Field( proto.MESSAGE, number=3, - message='DbServerProperties', + message="DbServerProperties", ) @@ -88,6 +87,7 @@ class DbServerProperties(proto.Message): Output only. OCID of database nodes associated with the database server. """ + class State(proto.Enum): r"""The various lifecycle states of the database server. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py similarity index 97% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py index 98637445e52b..7429af46b6cc 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/db_system_shape.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/db_system_shape.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'DbSystemShape', + "DbSystemShape", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py similarity index 94% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py index 6314fc926c5e..01b82a412c0b 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/entitlement.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/entitlement.py @@ -19,12 +19,11 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'Entitlement', - 'CloudAccountDetails', + "Entitlement", + "CloudAccountDetails", }, ) @@ -45,6 +44,7 @@ class Entitlement(proto.Message): state (google.cloud.oracledatabase_v1.types.Entitlement.State): Output only. 
Entitlement State. """ + class State(proto.Enum): r"""The various lifecycle states of the subscription. @@ -67,10 +67,10 @@ class State(proto.Enum): proto.STRING, number=1, ) - cloud_account_details: 'CloudAccountDetails' = proto.Field( + cloud_account_details: "CloudAccountDetails" = proto.Field( proto.MESSAGE, number=2, - message='CloudAccountDetails', + message="CloudAccountDetails", ) entitlement_id: str = proto.Field( proto.STRING, diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py similarity index 97% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py index 7e4669189feb..c57ed47f33ef 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/exadata_infra.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py @@ -17,20 +17,19 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.oracledatabase_v1.types import common from google.protobuf import timestamp_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore from google.type import month_pb2 # type: ignore +import proto # type: ignore +from google.cloud.oracledatabase_v1.types import common __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'CloudExadataInfrastructure', - 'CloudExadataInfrastructureProperties', - 'MaintenanceWindow', + "CloudExadataInfrastructure", + "CloudExadataInfrastructureProperties", + "MaintenanceWindow", }, ) @@ -80,10 +79,10 @@ class CloudExadataInfrastructure(proto.Message): proto.STRING, number=4, ) - properties: 
'CloudExadataInfrastructureProperties' = proto.Field( + properties: "CloudExadataInfrastructureProperties" = proto.Field( proto.MESSAGE, number=5, - message='CloudExadataInfrastructureProperties', + message="CloudExadataInfrastructureProperties", ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -189,6 +188,7 @@ class CloudExadataInfrastructureProperties(proto.Message): the database servers (dom0) in the Exadata Infrastructure. Example: 20.1.15 """ + class State(proto.Enum): r"""The various lifecycle states of the Exadata Infrastructure. @@ -243,10 +243,10 @@ class State(proto.Enum): proto.INT32, number=5, ) - maintenance_window: 'MaintenanceWindow' = proto.Field( + maintenance_window: "MaintenanceWindow" = proto.Field( proto.MESSAGE, number=6, - message='MaintenanceWindow', + message="MaintenanceWindow", ) state: State = proto.Field( proto.ENUM, @@ -390,6 +390,7 @@ class MaintenanceWindow(proto.Message): of a custom action timeout (waiting period) between database server patching operations. """ + class MaintenanceWindowPreference(proto.Enum): r"""Maintenance window preference. 
diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py similarity index 95% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py index bc14de4f4a9c..1ecf83198d06 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/gi_version.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/gi_version.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'GiVersion', + "GiVersion", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py similarity index 94% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py index f6d3d2d71fc0..f81798592e71 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/location_metadata.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/location_metadata.py @@ -19,11 +19,10 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'LocationMetadata', + "LocationMetadata", }, ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py 
b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py similarity index 92% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py rename to packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py index 038dbf0e4c44..796dbe2203f9 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/oracledatabase.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/oracledatabase.py @@ -17,60 +17,63 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.oracledatabase_v1.types import autonomous_database as gco_autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database_character_set -from google.cloud.oracledatabase_v1.types import autonomous_db_backup -from google.cloud.oracledatabase_v1.types import autonomous_db_version -from google.cloud.oracledatabase_v1.types import db_node -from google.cloud.oracledatabase_v1.types import db_server -from google.cloud.oracledatabase_v1.types import db_system_shape -from google.cloud.oracledatabase_v1.types import entitlement -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import gi_version -from google.cloud.oracledatabase_v1.types import vm_cluster from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + vm_cluster, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 
'ListCloudExadataInfrastructuresRequest', - 'ListCloudExadataInfrastructuresResponse', - 'GetCloudExadataInfrastructureRequest', - 'CreateCloudExadataInfrastructureRequest', - 'DeleteCloudExadataInfrastructureRequest', - 'ListCloudVmClustersRequest', - 'ListCloudVmClustersResponse', - 'GetCloudVmClusterRequest', - 'CreateCloudVmClusterRequest', - 'DeleteCloudVmClusterRequest', - 'ListEntitlementsRequest', - 'ListEntitlementsResponse', - 'ListDbServersRequest', - 'ListDbServersResponse', - 'ListDbNodesRequest', - 'ListDbNodesResponse', - 'ListGiVersionsRequest', - 'ListGiVersionsResponse', - 'ListDbSystemShapesRequest', - 'ListDbSystemShapesResponse', - 'OperationMetadata', - 'ListAutonomousDatabasesRequest', - 'ListAutonomousDatabasesResponse', - 'GetAutonomousDatabaseRequest', - 'CreateAutonomousDatabaseRequest', - 'DeleteAutonomousDatabaseRequest', - 'RestoreAutonomousDatabaseRequest', - 'GenerateAutonomousDatabaseWalletRequest', - 'GenerateAutonomousDatabaseWalletResponse', - 'ListAutonomousDbVersionsRequest', - 'ListAutonomousDbVersionsResponse', - 'ListAutonomousDatabaseCharacterSetsRequest', - 'ListAutonomousDatabaseCharacterSetsResponse', - 'ListAutonomousDatabaseBackupsRequest', - 'ListAutonomousDatabaseBackupsResponse', + "ListCloudExadataInfrastructuresRequest", + "ListCloudExadataInfrastructuresResponse", + "GetCloudExadataInfrastructureRequest", + "CreateCloudExadataInfrastructureRequest", + "DeleteCloudExadataInfrastructureRequest", + "ListCloudVmClustersRequest", + "ListCloudVmClustersResponse", + "GetCloudVmClusterRequest", + "CreateCloudVmClusterRequest", + "DeleteCloudVmClusterRequest", + "ListEntitlementsRequest", + "ListEntitlementsResponse", + "ListDbServersRequest", + "ListDbServersResponse", + "ListDbNodesRequest", + "ListDbNodesResponse", + "ListGiVersionsRequest", + "ListGiVersionsResponse", + "ListDbSystemShapesRequest", + "ListDbSystemShapesResponse", + "OperationMetadata", + "ListAutonomousDatabasesRequest", + 
"ListAutonomousDatabasesResponse", + "GetAutonomousDatabaseRequest", + "CreateAutonomousDatabaseRequest", + "DeleteAutonomousDatabaseRequest", + "RestoreAutonomousDatabaseRequest", + "GenerateAutonomousDatabaseWalletRequest", + "GenerateAutonomousDatabaseWalletResponse", + "ListAutonomousDbVersionsRequest", + "ListAutonomousDbVersionsResponse", + "ListAutonomousDatabaseCharacterSetsRequest", + "ListAutonomousDatabaseCharacterSetsResponse", + "ListAutonomousDatabaseBackupsRequest", + "ListAutonomousDatabaseBackupsResponse", }, ) @@ -122,7 +125,9 @@ class ListCloudExadataInfrastructuresResponse(proto.Message): def raw_page(self): return self - cloud_exadata_infrastructures: MutableSequence[exadata_infra.CloudExadataInfrastructure] = proto.RepeatedField( + cloud_exadata_infrastructures: MutableSequence[ + exadata_infra.CloudExadataInfrastructure + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=exadata_infra.CloudExadataInfrastructure, @@ -189,10 +194,12 @@ class CreateCloudExadataInfrastructureRequest(proto.Message): proto.STRING, number=2, ) - cloud_exadata_infrastructure: exadata_infra.CloudExadataInfrastructure = proto.Field( - proto.MESSAGE, - number=3, - message=exadata_infra.CloudExadataInfrastructure, + cloud_exadata_infrastructure: exadata_infra.CloudExadataInfrastructure = ( + proto.Field( + proto.MESSAGE, + number=3, + message=exadata_infra.CloudExadataInfrastructure, + ) ) request_id: str = proto.Field( proto.STRING, @@ -694,7 +701,9 @@ class ListDbSystemShapesResponse(proto.Message): def raw_page(self): return self - db_system_shapes: MutableSequence[db_system_shape.DbSystemShape] = proto.RepeatedField( + db_system_shapes: MutableSequence[ + db_system_shape.DbSystemShape + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=db_system_shape.DbSystemShape, @@ -835,7 +844,9 @@ class ListAutonomousDatabasesResponse(proto.Message): def raw_page(self): return self - autonomous_databases: 
MutableSequence[gco_autonomous_database.AutonomousDatabase] = proto.RepeatedField( + autonomous_databases: MutableSequence[ + gco_autonomous_database.AutonomousDatabase + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=gco_autonomous_database.AutonomousDatabase, @@ -1071,7 +1082,9 @@ class ListAutonomousDbVersionsResponse(proto.Message): def raw_page(self): return self - autonomous_db_versions: MutableSequence[autonomous_db_version.AutonomousDbVersion] = proto.RepeatedField( + autonomous_db_versions: MutableSequence[ + autonomous_db_version.AutonomousDbVersion + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=autonomous_db_version.AutonomousDbVersion, @@ -1141,7 +1154,9 @@ class ListAutonomousDatabaseCharacterSetsResponse(proto.Message): def raw_page(self): return self - autonomous_database_character_sets: MutableSequence[autonomous_database_character_set.AutonomousDatabaseCharacterSet] = proto.RepeatedField( + autonomous_database_character_sets: MutableSequence[ + autonomous_database_character_set.AutonomousDatabaseCharacterSet + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=autonomous_database_character_set.AutonomousDatabaseCharacterSet, @@ -1213,7 +1228,9 @@ class ListAutonomousDatabaseBackupsResponse(proto.Message): def raw_page(self): return self - autonomous_database_backups: MutableSequence[autonomous_db_backup.AutonomousDatabaseBackup] = proto.RepeatedField( + autonomous_database_backups: MutableSequence[ + autonomous_db_backup.AutonomousDatabaseBackup + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=autonomous_db_backup.AutonomousDatabaseBackup, diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py similarity index 97% rename from owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py rename to 
packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py index 3fe0ae3b740b..44104d291bd3 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/google/cloud/oracledatabase_v1/types/vm_cluster.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/vm_cluster.py @@ -17,18 +17,16 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import timestamp_pb2 # type: ignore from google.type import datetime_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.oracledatabase.v1', + package="google.cloud.oracledatabase.v1", manifest={ - 'CloudVmCluster', - 'CloudVmClusterProperties', - 'DataCollectionOptions', + "CloudVmCluster", + "CloudVmClusterProperties", + "DataCollectionOptions", }, ) @@ -91,10 +89,10 @@ class CloudVmCluster(proto.Message): proto.STRING, number=12, ) - properties: 'CloudVmClusterProperties' = proto.Field( + properties: "CloudVmClusterProperties" = proto.Field( proto.MESSAGE, number=6, - message='CloudVmClusterProperties', + message="CloudVmClusterProperties", ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -204,6 +202,7 @@ class CloudVmClusterProperties(proto.Message): cluster_name (str): Optional. OCI Cluster name. """ + class LicenseType(proto.Enum): r"""Different licenses supported. 
@@ -338,10 +337,10 @@ class State(proto.Enum): proto.STRING, number=16, ) - diagnostics_data_collection_options: 'DataCollectionOptions' = proto.Field( + diagnostics_data_collection_options: "DataCollectionOptions" = proto.Field( proto.MESSAGE, number=19, - message='DataCollectionOptions', + message="DataCollectionOptions", ) state: State = proto.Field( proto.ENUM, diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/mypy.ini b/packages/google-cloud-oracledatabase/mypy.ini similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/mypy.ini rename to packages/google-cloud-oracledatabase/mypy.ini diff --git a/packages/google-cloud-oracledatabase/noxfile.py b/packages/google-cloud-oracledatabase/noxfile.py new file mode 100644 index 000000000000..67b7265f7586 --- /dev/null +++ b/packages/google-cloud-oracledatabase/noxfile.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. 
+ + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_autonomous_database_sync.py diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_exadata_infrastructure_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_create_cloud_vm_cluster_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_autonomous_database_sync.py diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_exadata_infrastructure_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_delete_cloud_vm_cluster_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_generate_autonomous_database_wallet_sync.py diff 
--git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_autonomous_database_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_exadata_infrastructure_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_get_cloud_vm_cluster_sync.py diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_backups_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_database_character_sets_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py rename to 
packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_databases_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_autonomous_db_versions_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_exadata_infrastructures_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py rename to 
packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_cloud_vm_clusters_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_nodes_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_servers_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_db_system_shapes_sync.py diff --git 
a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_entitlements_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_list_gi_versions_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py b/packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py rename to packages/google-cloud-oracledatabase/samples/generated_samples/oracledatabase_v1_generated_oracle_database_restore_autonomous_database_sync.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json 
b/packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json rename to packages/google-cloud-oracledatabase/samples/generated_samples/snippet_metadata_google.cloud.oracledatabase.v1.json diff --git a/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/scripts/fixup_oracledatabase_v1_keywords.py b/packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/scripts/fixup_oracledatabase_v1_keywords.py rename to packages/google-cloud-oracledatabase/scripts/fixup_oracledatabase_v1_keywords.py diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/setup.py b/packages/google-cloud-oracledatabase/setup.py similarity index 93% rename from owl-bot-staging/google-cloud-oracledatabase/v1/setup.py rename to packages/google-cloud-oracledatabase/setup.py index ac8389232396..f4dfafa62eff 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/setup.py +++ b/packages/google-cloud-oracledatabase/setup.py @@ -17,20 +17,22 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-oracledatabase' +name = "google-cloud-oracledatabase" description = "Google Cloud Oracledatabase API client library" version = None -with open(os.path.join(package_root, 'google/cloud/oracledatabase/gapic_version.py')) as fp: +with open( + os.path.join(package_root, "google/cloud/oracledatabase/gapic_version.py") +) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 
version = version_candidates[0] if version[0] == "0": diff --git a/packages/google-cloud-oracledatabase/testing/.gitignore b/packages/google-cloud-oracledatabase/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-oracledatabase/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.10.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.10.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.10.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.10.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.11.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.11.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.11.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.11.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.12.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.12.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.12.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.12.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.7.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.7.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.7.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.7.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.8.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.8.txt similarity index 100% rename from 
owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.8.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.8.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.9.txt b/packages/google-cloud-oracledatabase/testing/constraints-3.9.txt similarity index 100% rename from owl-bot-staging/google-cloud-oracledatabase/v1/testing/constraints-3.9.txt rename to packages/google-cloud-oracledatabase/testing/constraints-3.9.txt diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py b/packages/google-cloud-oracledatabase/tests/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py rename to packages/google-cloud-oracledatabase/tests/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/__init__.py +++ b/packages/google-cloud-oracledatabase/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py rename to packages/google-cloud-oracledatabase/tests/unit/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/__init__.py +++ b/packages/google-cloud-oracledatabase/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py rename to packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py index 
7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/__init__.py +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py rename to packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py similarity index 63% rename from owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py rename to packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py index 7ad6633e7c25..05cfe6d9f132 100644 --- a/owl-bot-staging/google-cloud-oracledatabase/v1/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py +++ b/packages/google-cloud-oracledatabase/tests/unit/gapic/oracledatabase_v1/test_oracle_database.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,78 +22,94 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio from collections.abc import Iterable -from google.protobuf import json_format import json import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format -from google.api_core import client_options +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.cloud.oracledatabase_v1.services.oracle_database import OracleDatabaseClient -from google.cloud.oracledatabase_v1.services.oracle_database import pagers -from google.cloud.oracledatabase_v1.services.oracle_database import transports -from google.cloud.oracledatabase_v1.types import autonomous_database -from google.cloud.oracledatabase_v1.types import autonomous_database as gco_autonomous_database -from google.cloud.oracledatabase_v1.types import 
autonomous_database_character_set -from google.cloud.oracledatabase_v1.types import autonomous_db_backup -from google.cloud.oracledatabase_v1.types import autonomous_db_version -from google.cloud.oracledatabase_v1.types import common -from google.cloud.oracledatabase_v1.types import db_node -from google.cloud.oracledatabase_v1.types import db_server -from google.cloud.oracledatabase_v1.types import db_system_shape -from google.cloud.oracledatabase_v1.types import entitlement -from google.cloud.oracledatabase_v1.types import exadata_infra -from google.cloud.oracledatabase_v1.types import gi_version -from google.cloud.oracledatabase_v1.types import oracledatabase -from google.cloud.oracledatabase_v1.types import vm_cluster -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.type import datetime_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore from google.type import month_pb2 # type: ignore from google.type import timeofday_pb2 # type: ignore -import google.auth +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.oracledatabase_v1.services.oracle_database import ( + OracleDatabaseClient, + pagers, + transports, +) +from google.cloud.oracledatabase_v1.types import ( + autonomous_database_character_set, + autonomous_db_backup, + autonomous_db_version, + common, + db_node, + db_server, + db_system_shape, + entitlement, + exadata_infra, + gi_version, + oracledatabase, + vm_cluster, +) 
+from google.cloud.oracledatabase_v1.types import ( + autonomous_database as gco_autonomous_database, +) +from google.cloud.oracledatabase_v1.types import autonomous_database def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -103,98 +120,219 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert OracleDatabaseClient._get_default_mtls_endpoint(None) is None - assert OracleDatabaseClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert OracleDatabaseClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert OracleDatabaseClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert OracleDatabaseClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert OracleDatabaseClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + assert ( + 
OracleDatabaseClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OracleDatabaseClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + def test__read_environment_variables(): assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert OracleDatabaseClient._read_environment_variables() == (True, "auto", None) + assert OracleDatabaseClient._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + None, + ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: OracleDatabaseClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "never", None) + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "never", + None, + ) with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "always", None) + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "auto", None) + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: OracleDatabaseClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert OracleDatabaseClient._read_environment_variables() == (False, "auto", "foo.com") + assert OracleDatabaseClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert OracleDatabaseClient._get_client_cert_source(None, False) is None - assert OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + OracleDatabaseClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + OracleDatabaseClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert OracleDatabaseClient._get_client_cert_source(None, True) is mock_default_cert_source - assert OracleDatabaseClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE - default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert OracleDatabaseClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert OracleDatabaseClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT - assert OracleDatabaseClient._get_api_endpoint(None, None, 
default_universe, "auto") == default_endpoint - assert OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "always") == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT - assert OracleDatabaseClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT - assert OracleDatabaseClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + OracleDatabaseClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + OracleDatabaseClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "always") + == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OracleDatabaseClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == OracleDatabaseClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + OracleDatabaseClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - OracleDatabaseClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + OracleDatabaseClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert OracleDatabaseClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert OracleDatabaseClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert OracleDatabaseClient._get_universe_domain(None, None) == OracleDatabaseClient._DEFAULT_UNIVERSE + assert ( + OracleDatabaseClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + OracleDatabaseClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + OracleDatabaseClient._get_universe_domain(None, None) + == OracleDatabaseClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: OracleDatabaseClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), -]) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) ) assert client._validate_universe_domain() == True @@ -204,15 +342,19 @@ def test__validate_universe_domain(client_class, transport_class, transport_name if transport_name == "grpc": # Test the case where credentials are provided by the # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) client = client_class(transport=transport_class(channel=channel)) assert client._validate_universe_domain() == True # Test the case where credentials do not exist: e.g. a transport is provided # with no credentials. Validation should still succeed because there is no # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) transport._credentials = None client = client_class(transport=transport) assert client._validate_universe_domain() == True @@ -220,40 +362,58 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=credentials) - ) + client = client_class(transport=transport_class(credentials=credentials)) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) # Test the case when there is a universe mismatch from the client. # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) # Test that ValueError is raised if universe_domain is provided via client options and credentials is None with pytest.raises(ValueError): client._compare_universes("foo.bar", None) -@pytest.mark.parametrize("client_class,transport_name", [ - (OracleDatabaseClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OracleDatabaseClient, "rest"), + ], +) def test_oracle_database_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -261,48 +421,64 @@ def test_oracle_database_client_from_service_account_info(client_class, transpor assert isinstance(client, client_class) assert client.transport._host == ( - 'oracledatabase.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://oracledatabase.googleapis.com' + "oracledatabase.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.OracleDatabaseRestTransport, "rest"), -]) -def test_oracle_database_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.OracleDatabaseRestTransport, "rest"), + ], +) +def test_oracle_database_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + 
service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (OracleDatabaseClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OracleDatabaseClient, "rest"), + ], +) def test_oracle_database_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'oracledatabase.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://oracledatabase.googleapis.com' + "oracledatabase.googleapis.com:443" + if 
transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com" ) @@ -317,27 +493,34 @@ def test_oracle_database_client_get_transport_class(): assert transport == transports.OracleDatabaseRestTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), -]) -@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) -def test_oracle_database_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) +def test_oracle_database_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(OracleDatabaseClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(OracleDatabaseClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(OracleDatabaseClient, 'get_transport_class') as gtc: + with mock.patch.object(OracleDatabaseClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. 
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -355,13 +538,15 @@ def test_oracle_database_client_client_options(client_class, transport_class, tr # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -373,7 +558,7 @@ def test_oracle_database_client_client_options(client_class, transport_class, tr # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -393,23 +578,33 @@ def test_oracle_database_client_client_options(client_class, transport_class, tr with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + 
host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -418,43 +613,63 @@ def test_oracle_database_client_client_options(client_class, transport_class, tr api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "true"), - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "false"), -]) -@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "true"), + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + OracleDatabaseClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_oracle_database_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_oracle_database_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -473,12 +688,22 @@ def test_oracle_database_client_mtls_env_auto(client_class, transport_class, tra # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -499,15 +724,22 @@ def test_oracle_database_client_mtls_env_auto(client_class, transport_class, tra ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -517,18 +749,24 @@ def test_oracle_database_client_mtls_env_auto(client_class, transport_class, tra ) -@pytest.mark.parametrize("client_class", [ - OracleDatabaseClient -]) -@mock.patch.object(OracleDatabaseClient, "DEFAULT_ENDPOINT", modify_default_endpoint(OracleDatabaseClient)) +@pytest.mark.parametrize("client_class", [OracleDatabaseClient]) +@mock.patch.object( + OracleDatabaseClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OracleDatabaseClient), +) def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -536,8 +774,12 @@ def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None @@ -555,16 +797,28 @@ def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -574,33 +828,55 @@ def test_oracle_database_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + -@pytest.mark.parametrize("client_class", [ - OracleDatabaseClient -]) -@mock.patch.object(OracleDatabaseClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(OracleDatabaseClient)) +@pytest.mark.parametrize("client_class", [OracleDatabaseClient]) +@mock.patch.object( + OracleDatabaseClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OracleDatabaseClient), +) def test_oracle_database_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = OracleDatabaseClient._DEFAULT_UNIVERSE - default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = OracleDatabaseClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -623,11 +899,19 @@ def test_oracle_database_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the 
_DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -635,25 +919,34 @@ def test_oracle_database_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), -]) -def test_oracle_database_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest"), + ], +) +def test_oracle_database_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -662,22 +955,28 @@ def test_oracle_database_client_client_options_scopes(client_class, transport_cl api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", None), -]) -def test_oracle_database_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport, "rest", None), + ], +) +def test_oracle_database_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -687,10 +986,13 @@ def test_oracle_database_client_client_options_credentials_file(client_class, tr ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListCloudExadataInfrastructuresRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListCloudExadataInfrastructuresRequest, + dict, + ], +) def test_list_cloud_exadata_infrastructures_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -698,30 +1000,33 @@ def test_list_cloud_exadata_infrastructures_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListCloudExadataInfrastructuresResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(return_value) + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_cloud_exadata_infrastructures(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListCloudExadataInfrastructuresPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_cloud_exadata_infrastructures_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -737,12 +1042,19 @@ def test_list_cloud_exadata_infrastructures_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_cloud_exadata_infrastructures in client._transport._wrapped_methods + assert ( + client._transport.list_cloud_exadata_infrastructures + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_cloud_exadata_infrastructures] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_cloud_exadata_infrastructures + ] = mock_rpc request = {} client.list_cloud_exadata_infrastructures(request) @@ -757,57 +1069,67 @@ def test_list_cloud_exadata_infrastructures_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_cloud_exadata_infrastructures_rest_required_fields(request_type=oracledatabase.ListCloudExadataInfrastructuresRequest): +def test_list_cloud_exadata_infrastructures_rest_required_fields( + request_type=oracledatabase.ListCloudExadataInfrastructuresRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_exadata_infrastructures._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -815,42 +1137,65 @@ def test_list_cloud_exadata_infrastructures_rest_required_fields(request_type=or response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(return_value) + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_cloud_exadata_infrastructures(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_cloud_exadata_infrastructures_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.list_cloud_exadata_infrastructures._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + unset_fields = ( + transport.list_cloud_exadata_infrastructures._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_cloud_exadata_infrastructures_rest_interceptors(null_interceptor): transport = 
transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_cloud_exadata_infrastructures") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_cloud_exadata_infrastructures") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_cloud_exadata_infrastructures", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_list_cloud_exadata_infrastructures", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListCloudExadataInfrastructuresRequest.pb(oracledatabase.ListCloudExadataInfrastructuresRequest()) + pb_message = oracledatabase.ListCloudExadataInfrastructuresRequest.pb( + oracledatabase.ListCloudExadataInfrastructuresRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -861,34 +1206,49 @@ def test_list_cloud_exadata_infrastructures_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(oracledatabase.ListCloudExadataInfrastructuresResponse()) + req.return_value._content = ( + oracledatabase.ListCloudExadataInfrastructuresResponse.to_json( + oracledatabase.ListCloudExadataInfrastructuresResponse() + ) + ) 
request = oracledatabase.ListCloudExadataInfrastructuresRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() - client.list_cloud_exadata_infrastructures(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_cloud_exadata_infrastructures( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_cloud_exadata_infrastructures_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListCloudExadataInfrastructuresRequest): +def test_list_cloud_exadata_infrastructures_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListCloudExadataInfrastructuresRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -904,16 +1264,16 @@ def test_list_cloud_exadata_infrastructures_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListCloudExadataInfrastructuresResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -921,9 +1281,11 @@ def test_list_cloud_exadata_infrastructures_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb(return_value) + return_value = oracledatabase.ListCloudExadataInfrastructuresResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_cloud_exadata_infrastructures(**mock_args) @@ -932,10 +1294,16 @@ def test_list_cloud_exadata_infrastructures_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" + % client.transport._host, + args[1], + ) -def test_list_cloud_exadata_infrastructures_rest_flattened_error(transport: str = 'rest'): +def test_list_cloud_exadata_infrastructures_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -946,20 +1314,20 @@ def test_list_cloud_exadata_infrastructures_rest_flattened_error(transport: str with pytest.raises(ValueError): client.list_cloud_exadata_infrastructures( oracledatabase.ListCloudExadataInfrastructuresRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = 'rest'): +def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListCloudExadataInfrastructuresResponse( @@ -968,17 +1336,17 @@ def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = 'rest'): exadata_infra.CloudExadataInfrastructure(), exadata_infra.CloudExadataInfrastructure(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListCloudExadataInfrastructuresResponse( cloud_exadata_infrastructures=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListCloudExadataInfrastructuresResponse( cloud_exadata_infrastructures=[ exadata_infra.CloudExadataInfrastructure(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListCloudExadataInfrastructuresResponse( cloud_exadata_infrastructures=[ @@ -991,31 +1359,40 @@ def test_list_cloud_exadata_infrastructures_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListCloudExadataInfrastructuresResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_cloud_exadata_infrastructures(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, exadata_infra.CloudExadataInfrastructure) - for i in results) + assert all( + isinstance(i, 
exadata_infra.CloudExadataInfrastructure) for i in results + ) - pages = list(client.list_cloud_exadata_infrastructures(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.list_cloud_exadata_infrastructures(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.GetCloudExadataInfrastructureRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetCloudExadataInfrastructureRequest, + dict, + ], +) def test_get_cloud_exadata_infrastructure_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1023,17 +1400,19 @@ def test_get_cloud_exadata_infrastructure_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = exadata_infra.CloudExadataInfrastructure( - name='name_value', - display_name='display_name_value', - gcp_oracle_zone='gcp_oracle_zone_value', - entitlement_id='entitlement_id_value', + name="name_value", + display_name="display_name_value", + gcp_oracle_zone="gcp_oracle_zone_value", + entitlement_id="entitlement_id_value", ) # Wrap the value into a proper Response obj @@ -1043,16 +1422,17 @@ def test_get_cloud_exadata_infrastructure_rest(request_type): return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_cloud_exadata_infrastructure(request) # Establish that the response is the type that we expect. assert isinstance(response, exadata_infra.CloudExadataInfrastructure) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.gcp_oracle_zone == 'gcp_oracle_zone_value' - assert response.entitlement_id == 'entitlement_id_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.gcp_oracle_zone == "gcp_oracle_zone_value" + assert response.entitlement_id == "entitlement_id_value" + def test_get_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1068,12 +1448,19 @@ def test_get_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_cloud_exadata_infrastructure in client._transport._wrapped_methods + assert ( + client._transport.get_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # 
operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_cloud_exadata_infrastructure] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cloud_exadata_infrastructure + ] = mock_rpc request = {} client.get_cloud_exadata_infrastructure(request) @@ -1088,55 +1475,60 @@ def test_get_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_cloud_exadata_infrastructure_rest_required_fields(request_type=oracledatabase.GetCloudExadataInfrastructureRequest): +def test_get_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.GetCloudExadataInfrastructureRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = exadata_infra.CloudExadataInfrastructure() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1147,39 +1539,51 @@ def test_get_cloud_exadata_infrastructure_rest_required_fields(request_type=orac return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_cloud_exadata_infrastructure(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_cloud_exadata_infrastructure_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.get_cloud_exadata_infrastructure._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + unset_fields = ( + transport.get_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = 
OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_get_cloud_exadata_infrastructure") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_get_cloud_exadata_infrastructure") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_get_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_cloud_exadata_infrastructure" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.GetCloudExadataInfrastructureRequest.pb(oracledatabase.GetCloudExadataInfrastructureRequest()) + pb_message = oracledatabase.GetCloudExadataInfrastructureRequest.pb( + oracledatabase.GetCloudExadataInfrastructureRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1190,34 +1594,49 @@ def test_get_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = exadata_infra.CloudExadataInfrastructure.to_json(exadata_infra.CloudExadataInfrastructure()) + req.return_value._content = exadata_infra.CloudExadataInfrastructure.to_json( + exadata_infra.CloudExadataInfrastructure() + ) request = oracledatabase.GetCloudExadataInfrastructureRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = exadata_infra.CloudExadataInfrastructure() - client.get_cloud_exadata_infrastructure(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + 
client.get_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_cloud_exadata_infrastructure_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GetCloudExadataInfrastructureRequest): +def test_get_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.GetCloudExadataInfrastructureRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -1233,16 +1652,18 @@ def test_get_cloud_exadata_infrastructure_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = exadata_infra.CloudExadataInfrastructure() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1252,7 +1673,7 @@ def test_get_cloud_exadata_infrastructure_rest_flattened(): # Convert return value to protobuf type return_value = exadata_infra.CloudExadataInfrastructure.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_cloud_exadata_infrastructure(**mock_args) @@ -1261,10 +1682,14 @@ def test_get_cloud_exadata_infrastructure_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" + % client.transport._host, + args[1], + ) -def test_get_cloud_exadata_infrastructure_rest_flattened_error(transport: str = 'rest'): +def test_get_cloud_exadata_infrastructure_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1275,21 +1700,23 @@ def test_get_cloud_exadata_infrastructure_rest_flattened_error(transport: str = with pytest.raises(ValueError): client.get_cloud_exadata_infrastructure( oracledatabase.GetCloudExadataInfrastructureRequest(), - name='name_value', + name="name_value", ) def test_get_cloud_exadata_infrastructure_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.CreateCloudExadataInfrastructureRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateCloudExadataInfrastructureRequest, + dict, + ], +) def test_create_cloud_exadata_infrastructure_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1297,14 +1724,62 @@ def test_create_cloud_exadata_infrastructure_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["cloud_exadata_infrastructure"] = {'name': 'name_value', 'display_name': 'display_name_value', 'gcp_oracle_zone': 'gcp_oracle_zone_value', 'entitlement_id': 'entitlement_id_value', 'properties': {'ocid': 'ocid_value', 'compute_count': 1413, 'storage_count': 
1405, 'total_storage_size_gb': 2234, 'available_storage_size_gb': 2615, 'maintenance_window': {'preference': 1, 'months': [1], 'weeks_of_month': [1497, 1498], 'days_of_week': [1], 'hours_of_day': [1283, 1284], 'lead_time_week': 1455, 'patching_mode': 1, 'custom_action_timeout_mins': 2804, 'is_custom_action_timeout_enabled': True}, 'state': 1, 'shape': 'shape_value', 'oci_url': 'oci_url_value', 'cpu_count': 976, 'max_cpu_count': 1397, 'memory_size_gb': 1499, 'max_memory_gb': 1382, 'db_node_storage_size_gb': 2401, 'max_db_node_storage_size_gb': 2822, 'data_storage_size_tb': 0.2109, 'max_data_storage_tb': 0.19920000000000002, 'activated_storage_count': 2449, 'additional_storage_count': 2549, 'db_server_version': 'db_server_version_value', 'storage_server_version': 'storage_server_version_value', 'next_maintenance_run_id': 'next_maintenance_run_id_value', 'next_maintenance_run_time': {'seconds': 751, 'nanos': 543}, 'next_security_maintenance_run_time': {}, 'customer_contacts': [{'email': 'email_value'}], 'monthly_storage_server_version': 'monthly_storage_server_version_value', 'monthly_db_server_version': 'monthly_db_server_version_value'}, 'labels': {}, 'create_time': {}} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cloud_exadata_infrastructure"] = { + "name": "name_value", + "display_name": "display_name_value", + "gcp_oracle_zone": "gcp_oracle_zone_value", + "entitlement_id": "entitlement_id_value", + "properties": { + "ocid": "ocid_value", + "compute_count": 1413, + "storage_count": 1405, + "total_storage_size_gb": 2234, + "available_storage_size_gb": 2615, + "maintenance_window": { + "preference": 1, + "months": [1], + "weeks_of_month": [1497, 1498], + "days_of_week": [1], + "hours_of_day": [1283, 1284], + "lead_time_week": 1455, + "patching_mode": 1, + "custom_action_timeout_mins": 2804, + "is_custom_action_timeout_enabled": True, + }, + "state": 1, + "shape": "shape_value", + "oci_url": "oci_url_value", + "cpu_count": 976, + 
"max_cpu_count": 1397, + "memory_size_gb": 1499, + "max_memory_gb": 1382, + "db_node_storage_size_gb": 2401, + "max_db_node_storage_size_gb": 2822, + "data_storage_size_tb": 0.2109, + "max_data_storage_tb": 0.19920000000000002, + "activated_storage_count": 2449, + "additional_storage_count": 2549, + "db_server_version": "db_server_version_value", + "storage_server_version": "storage_server_version_value", + "next_maintenance_run_id": "next_maintenance_run_id_value", + "next_maintenance_run_time": {"seconds": 751, "nanos": 543}, + "next_security_maintenance_run_time": {}, + "customer_contacts": [{"email": "email_value"}], + "monthly_storage_server_version": "monthly_storage_server_version_value", + "monthly_db_server_version": "monthly_db_server_version_value", + }, + "labels": {}, + "create_time": {}, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = oracledatabase.CreateCloudExadataInfrastructureRequest.meta.fields["cloud_exadata_infrastructure"] + test_field = oracledatabase.CreateCloudExadataInfrastructureRequest.meta.fields[ + "cloud_exadata_infrastructure" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -1318,7 +1793,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -1332,7 +1807,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this 
test code will not run if all subfields are present at runtime - for field, value in request_init["cloud_exadata_infrastructure"].items(): # pragma: NO COVER + for field, value in request_init[ + "cloud_exadata_infrastructure" + ].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -1347,40 +1824,47 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["cloud_exadata_infrastructure"][field])): + for i in range( + 0, len(request_init["cloud_exadata_infrastructure"][field]) + ): del request_init["cloud_exadata_infrastructure"][field][i][subfield] else: del request_init["cloud_exadata_infrastructure"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_cloud_exadata_infrastructure(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_create_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1395,12 +1879,19 @@ def test_create_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_cloud_exadata_infrastructure in client._transport._wrapped_methods + assert ( + client._transport.create_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_cloud_exadata_infrastructure] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_cloud_exadata_infrastructure + ] = mock_rpc request = {} client.create_cloud_exadata_infrastructure(request) @@ -1419,7 +1910,9 @@ def test_create_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_cloud_exadata_infrastructure_rest_required_fields(request_type=oracledatabase.CreateCloudExadataInfrastructureRequest): +def test_create_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.CreateCloudExadataInfrastructureRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} @@ -1427,65 +1920,81 @@ def test_create_cloud_exadata_infrastructure_rest_required_fields(request_type=o request_init["cloud_exadata_infrastructure_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "cloudExadataInfrastructureId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "cloudExadataInfrastructureId" in jsonified_request - assert jsonified_request["cloudExadataInfrastructureId"] == request_init["cloud_exadata_infrastructure_id"] + assert ( + jsonified_request["cloudExadataInfrastructureId"] + == request_init["cloud_exadata_infrastructure_id"] + ) - jsonified_request["parent"] = 
'parent_value' - jsonified_request["cloudExadataInfrastructureId"] = 'cloud_exadata_infrastructure_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "cloudExadataInfrastructureId" + ] = "cloud_exadata_infrastructure_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("cloud_exadata_infrastructure_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "cloud_exadata_infrastructure_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "cloudExadataInfrastructureId" in jsonified_request - assert jsonified_request["cloudExadataInfrastructureId"] == 'cloud_exadata_infrastructure_id_value' + assert ( + jsonified_request["cloudExadataInfrastructureId"] + == "cloud_exadata_infrastructure_id_value" + ) client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_cloud_exadata_infrastructure(request) @@ -1495,34 +2004,64 @@ def test_create_cloud_exadata_infrastructure_rest_required_fields(request_type=o "cloudExadataInfrastructureId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_cloud_exadata_infrastructure_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.create_cloud_exadata_infrastructure._get_unset_required_fields({}) - assert set(unset_fields) == 
(set(("cloudExadataInfrastructureId", "requestId", )) & set(("parent", "cloudExadataInfrastructureId", "cloudExadataInfrastructure", ))) + unset_fields = ( + transport.create_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "cloudExadataInfrastructureId", + "requestId", + ) + ) + & set( + ( + "parent", + "cloudExadataInfrastructureId", + "cloudExadataInfrastructure", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_create_cloud_exadata_infrastructure") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_create_cloud_exadata_infrastructure") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_create_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_create_cloud_exadata_infrastructure", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = 
oracledatabase.CreateCloudExadataInfrastructureRequest.pb(oracledatabase.CreateCloudExadataInfrastructureRequest()) + pb_message = oracledatabase.CreateCloudExadataInfrastructureRequest.pb( + oracledatabase.CreateCloudExadataInfrastructureRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1533,34 +2072,47 @@ def test_create_cloud_exadata_infrastructure_rest_interceptors(null_interceptor) req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.CreateCloudExadataInfrastructureRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_cloud_exadata_infrastructure(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_cloud_exadata_infrastructure_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.CreateCloudExadataInfrastructureRequest): +def test_create_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.CreateCloudExadataInfrastructureRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -1576,18 +2128,20 @@ def test_create_cloud_exadata_infrastructure_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure(name='name_value'), - cloud_exadata_infrastructure_id='cloud_exadata_infrastructure_id_value', + parent="parent_value", + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure( + name="name_value" + ), + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", ) mock_args.update(sample_request) @@ -1595,7 +2149,7 @@ def test_create_cloud_exadata_infrastructure_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_cloud_exadata_infrastructure(**mock_args) @@ -1604,10 +2158,16 @@ def test_create_cloud_exadata_infrastructure_rest_flattened(): # 
request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudExadataInfrastructures" + % client.transport._host, + args[1], + ) -def test_create_cloud_exadata_infrastructure_rest_flattened_error(transport: str = 'rest'): +def test_create_cloud_exadata_infrastructure_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1618,23 +2178,27 @@ def test_create_cloud_exadata_infrastructure_rest_flattened_error(transport: str with pytest.raises(ValueError): client.create_cloud_exadata_infrastructure( oracledatabase.CreateCloudExadataInfrastructureRequest(), - parent='parent_value', - cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure(name='name_value'), - cloud_exadata_infrastructure_id='cloud_exadata_infrastructure_id_value', + parent="parent_value", + cloud_exadata_infrastructure=exadata_infra.CloudExadataInfrastructure( + name="name_value" + ), + cloud_exadata_infrastructure_id="cloud_exadata_infrastructure_id_value", ) def test_create_cloud_exadata_infrastructure_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.DeleteCloudExadataInfrastructureRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteCloudExadataInfrastructureRequest, + dict, + ], +) def test_delete_cloud_exadata_infrastructure_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1642,26 +2206,29 @@ def test_delete_cloud_exadata_infrastructure_rest(request_type): 
) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_cloud_exadata_infrastructure(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_delete_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1676,12 +2243,19 @@ def test_delete_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cloud_exadata_infrastructure in client._transport._wrapped_methods + assert ( + client._transport.delete_cloud_exadata_infrastructure + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.delete_cloud_exadata_infrastructure] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cloud_exadata_infrastructure + ] = mock_rpc request = {} client.delete_cloud_exadata_infrastructure(request) @@ -1700,57 +2274,67 @@ def test_delete_cloud_exadata_infrastructure_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_cloud_exadata_infrastructure_rest_required_fields(request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest): +def test_delete_cloud_exadata_infrastructure_rest_required_fields( + request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_exadata_infrastructure._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force", "request_id", )) + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1758,40 +2342,62 @@ def test_delete_cloud_exadata_infrastructure_rest_required_fields(request_type=o response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_cloud_exadata_infrastructure(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_cloud_exadata_infrastructure_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.delete_cloud_exadata_infrastructure._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force", "requestId", )) & set(("name", ))) + unset_fields = ( + transport.delete_cloud_exadata_infrastructure._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_cloud_exadata_infrastructure_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else 
transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_delete_cloud_exadata_infrastructure") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_exadata_infrastructure") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_delete_cloud_exadata_infrastructure", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_delete_cloud_exadata_infrastructure", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb(oracledatabase.DeleteCloudExadataInfrastructureRequest()) + pb_message = oracledatabase.DeleteCloudExadataInfrastructureRequest.pb( + oracledatabase.DeleteCloudExadataInfrastructureRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1802,34 +2408,49 @@ def test_delete_cloud_exadata_infrastructure_rest_interceptors(null_interceptor) req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.DeleteCloudExadataInfrastructureRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() - client.delete_cloud_exadata_infrastructure(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_cloud_exadata_infrastructure( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_delete_cloud_exadata_infrastructure_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest): +def test_delete_cloud_exadata_infrastructure_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.DeleteCloudExadataInfrastructureRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -1845,16 +2466,18 @@ def test_delete_cloud_exadata_infrastructure_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1862,7 +2485,7 @@ def test_delete_cloud_exadata_infrastructure_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_cloud_exadata_infrastructure(**mock_args) @@ -1871,10 +2494,16 @@ def test_delete_cloud_exadata_infrastructure_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudExadataInfrastructures/*}" + % client.transport._host, + args[1], + ) -def test_delete_cloud_exadata_infrastructure_rest_flattened_error(transport: str = 'rest'): +def test_delete_cloud_exadata_infrastructure_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1885,21 +2514,23 @@ def test_delete_cloud_exadata_infrastructure_rest_flattened_error(transport: str with pytest.raises(ValueError): client.delete_cloud_exadata_infrastructure( oracledatabase.DeleteCloudExadataInfrastructureRequest(), - name='name_value', + name="name_value", ) def test_delete_cloud_exadata_infrastructure_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListCloudVmClustersRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListCloudVmClustersRequest, + dict, + ], +) def test_list_cloud_vm_clusters_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1907,14 +2538,14 @@ def test_list_cloud_vm_clusters_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListCloudVmClustersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -1924,13 +2555,14 @@ def test_list_cloud_vm_clusters_rest(request_type): return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_cloud_vm_clusters(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListCloudVmClustersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_cloud_vm_clusters_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1946,12 +2578,19 @@ def test_list_cloud_vm_clusters_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_cloud_vm_clusters in client._transport._wrapped_methods + assert ( + client._transport.list_cloud_vm_clusters + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_cloud_vm_clusters] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_cloud_vm_clusters + ] = mock_rpc request = {} client.list_cloud_vm_clusters(request) @@ -1966,57 +2605,68 @@ def test_list_cloud_vm_clusters_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_cloud_vm_clusters_rest_required_fields(request_type=oracledatabase.ListCloudVmClustersRequest): +def test_list_cloud_vm_clusters_rest_required_fields( + request_type=oracledatabase.ListCloudVmClustersRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_cloud_vm_clusters._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListCloudVmClustersResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -2027,39 +2677,57 @@ def test_list_cloud_vm_clusters_rest_required_fields(request_type=oracledatabase return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_cloud_vm_clusters(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_cloud_vm_clusters_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_cloud_vm_clusters._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_cloud_vm_clusters_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) 
- with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_cloud_vm_clusters") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_cloud_vm_clusters") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_cloud_vm_clusters" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_cloud_vm_clusters" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListCloudVmClustersRequest.pb(oracledatabase.ListCloudVmClustersRequest()) + pb_message = oracledatabase.ListCloudVmClustersRequest.pb( + oracledatabase.ListCloudVmClustersRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2070,34 +2738,46 @@ def test_list_cloud_vm_clusters_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListCloudVmClustersResponse.to_json(oracledatabase.ListCloudVmClustersResponse()) + req.return_value._content = oracledatabase.ListCloudVmClustersResponse.to_json( + oracledatabase.ListCloudVmClustersResponse() + ) request = oracledatabase.ListCloudVmClustersRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListCloudVmClustersResponse() - client.list_cloud_vm_clusters(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_cloud_vm_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def 
test_list_cloud_vm_clusters_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListCloudVmClustersRequest): +def test_list_cloud_vm_clusters_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListCloudVmClustersRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -2113,16 +2793,16 @@ def test_list_cloud_vm_clusters_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListCloudVmClustersResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -2132,7 +2812,7 @@ def test_list_cloud_vm_clusters_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListCloudVmClustersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_cloud_vm_clusters(**mock_args) @@ -2141,10 +2821,14 @@ def test_list_cloud_vm_clusters_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" + % client.transport._host, + args[1], + ) -def test_list_cloud_vm_clusters_rest_flattened_error(transport: str = 'rest'): +def test_list_cloud_vm_clusters_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2155,20 +2839,20 @@ def test_list_cloud_vm_clusters_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_cloud_vm_clusters( oracledatabase.ListCloudVmClustersRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_cloud_vm_clusters_rest_pager(transport: str = 'rest'): +def test_list_cloud_vm_clusters_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListCloudVmClustersResponse( @@ -2177,17 +2861,17 @@ def test_list_cloud_vm_clusters_rest_pager(transport: str = 'rest'): vm_cluster.CloudVmCluster(), vm_cluster.CloudVmCluster(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListCloudVmClustersResponse( cloud_vm_clusters=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListCloudVmClustersResponse( cloud_vm_clusters=[ vm_cluster.CloudVmCluster(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListCloudVmClustersResponse( cloud_vm_clusters=[ @@ -2200,31 +2884,35 @@ def test_list_cloud_vm_clusters_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListCloudVmClustersResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListCloudVmClustersResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_cloud_vm_clusters(request=sample_request) results = list(pager) assert len(results) == 6 - assert 
all(isinstance(i, vm_cluster.CloudVmCluster) - for i in results) + assert all(isinstance(i, vm_cluster.CloudVmCluster) for i in results) pages = list(client.list_cloud_vm_clusters(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.GetCloudVmClusterRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetCloudVmClusterRequest, + dict, + ], +) def test_get_cloud_vm_cluster_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2232,20 +2920,22 @@ def test_get_cloud_vm_cluster_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = vm_cluster.CloudVmCluster( - name='name_value', - exadata_infrastructure='exadata_infrastructure_value', - display_name='display_name_value', - gcp_oracle_zone='gcp_oracle_zone_value', - cidr='cidr_value', - backup_subnet_cidr='backup_subnet_cidr_value', - network='network_value', + name="name_value", + exadata_infrastructure="exadata_infrastructure_value", + display_name="display_name_value", + gcp_oracle_zone="gcp_oracle_zone_value", + cidr="cidr_value", + backup_subnet_cidr="backup_subnet_cidr_value", + network="network_value", ) # Wrap the value into a proper Response obj @@ -2255,19 +2945,20 @@ def test_get_cloud_vm_cluster_rest(request_type): return_value = vm_cluster.CloudVmCluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_cloud_vm_cluster(request) # Establish that the response is the type that we expect. 
assert isinstance(response, vm_cluster.CloudVmCluster) - assert response.name == 'name_value' - assert response.exadata_infrastructure == 'exadata_infrastructure_value' - assert response.display_name == 'display_name_value' - assert response.gcp_oracle_zone == 'gcp_oracle_zone_value' - assert response.cidr == 'cidr_value' - assert response.backup_subnet_cidr == 'backup_subnet_cidr_value' - assert response.network == 'network_value' + assert response.name == "name_value" + assert response.exadata_infrastructure == "exadata_infrastructure_value" + assert response.display_name == "display_name_value" + assert response.gcp_oracle_zone == "gcp_oracle_zone_value" + assert response.cidr == "cidr_value" + assert response.backup_subnet_cidr == "backup_subnet_cidr_value" + assert response.network == "network_value" + def test_get_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2283,12 +2974,18 @@ def test_get_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_cloud_vm_cluster in client._transport._wrapped_methods + assert ( + client._transport.get_cloud_vm_cluster in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_cloud_vm_cluster] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_cloud_vm_cluster + ] = mock_rpc request = {} client.get_cloud_vm_cluster(request) @@ -2303,55 +3000,60 @@ def test_get_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.GetCloudVmClusterRequest): +def test_get_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.GetCloudVmClusterRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cloud_vm_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = 
OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = vm_cluster.CloudVmCluster() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -2362,39 +3064,48 @@ def test_get_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.G return_value = vm_cluster.CloudVmCluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_cloud_vm_cluster(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_cloud_vm_cluster_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = 
transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_cloud_vm_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_cloud_vm_cluster_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_get_cloud_vm_cluster") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_get_cloud_vm_cluster") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_get_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_cloud_vm_cluster" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.GetCloudVmClusterRequest.pb(oracledatabase.GetCloudVmClusterRequest()) + pb_message = oracledatabase.GetCloudVmClusterRequest.pb( + oracledatabase.GetCloudVmClusterRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2405,34 +3116,48 @@ def test_get_cloud_vm_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - 
req.return_value._content = vm_cluster.CloudVmCluster.to_json(vm_cluster.CloudVmCluster()) + req.return_value._content = vm_cluster.CloudVmCluster.to_json( + vm_cluster.CloudVmCluster() + ) request = oracledatabase.GetCloudVmClusterRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = vm_cluster.CloudVmCluster() - client.get_cloud_vm_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_cloud_vm_cluster_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GetCloudVmClusterRequest): +def test_get_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.GetCloudVmClusterRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -2448,16 +3173,18 @@ def test_get_cloud_vm_cluster_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = vm_cluster.CloudVmCluster() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -2467,7 +3194,7 @@ def test_get_cloud_vm_cluster_rest_flattened(): # Convert return value to protobuf type return_value = vm_cluster.CloudVmCluster.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_cloud_vm_cluster(**mock_args) @@ -2476,10 +3203,14 @@ def test_get_cloud_vm_cluster_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" + % client.transport._host, + args[1], + ) -def test_get_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): +def test_get_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2490,21 +3221,23 @@ def test_get_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_cloud_vm_cluster( oracledatabase.GetCloudVmClusterRequest(), - name='name_value', + name="name_value", ) def test_get_cloud_vm_cluster_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.CreateCloudVmClusterRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateCloudVmClusterRequest, + dict, + ], +) def test_create_cloud_vm_cluster_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2512,14 +3245,64 @@ def test_create_cloud_vm_cluster_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["cloud_vm_cluster"] = {'name': 'name_value', 'exadata_infrastructure': 'exadata_infrastructure_value', 'display_name': 'display_name_value', 'gcp_oracle_zone': 'gcp_oracle_zone_value', 'properties': {'ocid': 'ocid_value', 'license_type': 1, 'gi_version': 'gi_version_value', 'time_zone': {'id': 'id_value', 'version': 'version_value'}, 'ssh_public_keys': ['ssh_public_keys_value1', 
'ssh_public_keys_value2'], 'node_count': 1070, 'shape': 'shape_value', 'ocpu_count': 0.1087, 'memory_size_gb': 1499, 'db_node_storage_size_gb': 2401, 'storage_size_gb': 1591, 'data_storage_size_tb': 0.2109, 'disk_redundancy': 1, 'sparse_diskgroup_enabled': True, 'local_backup_enabled': True, 'hostname_prefix': 'hostname_prefix_value', 'diagnostics_data_collection_options': {'diagnostics_events_enabled': True, 'health_monitoring_enabled': True, 'incident_logs_enabled': True}, 'state': 1, 'scan_listener_port_tcp': 2356, 'scan_listener_port_tcp_ssl': 2789, 'domain': 'domain_value', 'scan_dns': 'scan_dns_value', 'hostname': 'hostname_value', 'cpu_core_count': 1496, 'system_version': 'system_version_value', 'scan_ip_ids': ['scan_ip_ids_value1', 'scan_ip_ids_value2'], 'scan_dns_record_id': 'scan_dns_record_id_value', 'oci_url': 'oci_url_value', 'db_server_ocids': ['db_server_ocids_value1', 'db_server_ocids_value2'], 'compartment_id': 'compartment_id_value', 'dns_listener_ip': 'dns_listener_ip_value', 'cluster_name': 'cluster_name_value'}, 'labels': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'cidr': 'cidr_value', 'backup_subnet_cidr': 'backup_subnet_cidr_value', 'network': 'network_value'} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["cloud_vm_cluster"] = { + "name": "name_value", + "exadata_infrastructure": "exadata_infrastructure_value", + "display_name": "display_name_value", + "gcp_oracle_zone": "gcp_oracle_zone_value", + "properties": { + "ocid": "ocid_value", + "license_type": 1, + "gi_version": "gi_version_value", + "time_zone": {"id": "id_value", "version": "version_value"}, + "ssh_public_keys": ["ssh_public_keys_value1", "ssh_public_keys_value2"], + "node_count": 1070, + "shape": "shape_value", + "ocpu_count": 0.1087, + "memory_size_gb": 1499, + "db_node_storage_size_gb": 2401, + "storage_size_gb": 1591, + "data_storage_size_tb": 0.2109, + "disk_redundancy": 1, + "sparse_diskgroup_enabled": True, + 
"local_backup_enabled": True, + "hostname_prefix": "hostname_prefix_value", + "diagnostics_data_collection_options": { + "diagnostics_events_enabled": True, + "health_monitoring_enabled": True, + "incident_logs_enabled": True, + }, + "state": 1, + "scan_listener_port_tcp": 2356, + "scan_listener_port_tcp_ssl": 2789, + "domain": "domain_value", + "scan_dns": "scan_dns_value", + "hostname": "hostname_value", + "cpu_core_count": 1496, + "system_version": "system_version_value", + "scan_ip_ids": ["scan_ip_ids_value1", "scan_ip_ids_value2"], + "scan_dns_record_id": "scan_dns_record_id_value", + "oci_url": "oci_url_value", + "db_server_ocids": ["db_server_ocids_value1", "db_server_ocids_value2"], + "compartment_id": "compartment_id_value", + "dns_listener_ip": "dns_listener_ip_value", + "cluster_name": "cluster_name_value", + }, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "cidr": "cidr_value", + "backup_subnet_cidr": "backup_subnet_cidr_value", + "network": "network_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = oracledatabase.CreateCloudVmClusterRequest.meta.fields["cloud_vm_cluster"] + test_field = oracledatabase.CreateCloudVmClusterRequest.meta.fields[ + "cloud_vm_cluster" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -2533,7 +3316,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2547,7 +3330,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["cloud_vm_cluster"].items(): # pragma: NO COVER + for field, value in request_init["cloud_vm_cluster"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2562,12 +3345,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = 
subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2580,22 +3367,23 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_cloud_vm_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_create_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2610,12 +3398,19 @@ def test_create_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_cloud_vm_cluster in client._transport._wrapped_methods + assert ( + client._transport.create_cloud_vm_cluster + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.create_cloud_vm_cluster] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cloud_vm_cluster + ] = mock_rpc request = {} client.create_cloud_vm_cluster(request) @@ -2634,7 +3429,9 @@ def test_create_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.CreateCloudVmClusterRequest): +def test_create_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.CreateCloudVmClusterRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} @@ -2642,65 +3439,73 @@ def test_create_cloud_vm_cluster_rest_required_fields(request_type=oracledatabas request_init["cloud_vm_cluster_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "cloudVmClusterId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "cloudVmClusterId" in jsonified_request assert jsonified_request["cloudVmClusterId"] == request_init["cloud_vm_cluster_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["cloudVmClusterId"] = 'cloud_vm_cluster_id_value' + 
jsonified_request["parent"] = "parent_value" + jsonified_request["cloudVmClusterId"] = "cloud_vm_cluster_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cloud_vm_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("cloud_vm_cluster_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "cloud_vm_cluster_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "cloudVmClusterId" in jsonified_request - assert jsonified_request["cloudVmClusterId"] == 'cloud_vm_cluster_id_value' + assert jsonified_request["cloudVmClusterId"] == "cloud_vm_cluster_id_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_cloud_vm_cluster(request) @@ -2710,34 +3515,60 @@ def test_create_cloud_vm_cluster_rest_required_fields(request_type=oracledatabas "cloudVmClusterId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_cloud_vm_cluster_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_cloud_vm_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("cloudVmClusterId", "requestId", )) & set(("parent", "cloudVmClusterId", "cloudVmCluster", ))) + assert set(unset_fields) == ( + set( + ( + "cloudVmClusterId", + "requestId", + ) + ) + & set( + ( + "parent", + "cloudVmClusterId", + "cloudVmCluster", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def 
test_create_cloud_vm_cluster_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_create_cloud_vm_cluster") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_create_cloud_vm_cluster") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_create_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_create_cloud_vm_cluster" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.CreateCloudVmClusterRequest.pb(oracledatabase.CreateCloudVmClusterRequest()) + pb_message = oracledatabase.CreateCloudVmClusterRequest.pb( + oracledatabase.CreateCloudVmClusterRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2748,34 +3579,46 @@ def test_create_cloud_vm_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = 
oracledatabase.CreateCloudVmClusterRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_cloud_vm_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_cloud_vm_cluster_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.CreateCloudVmClusterRequest): +def test_create_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.CreateCloudVmClusterRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -2791,18 +3634,18 @@ def test_create_cloud_vm_cluster_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - cloud_vm_cluster=vm_cluster.CloudVmCluster(name='name_value'), - cloud_vm_cluster_id='cloud_vm_cluster_id_value', + parent="parent_value", + cloud_vm_cluster=vm_cluster.CloudVmCluster(name="name_value"), + cloud_vm_cluster_id="cloud_vm_cluster_id_value", ) mock_args.update(sample_request) @@ -2810,7 +3653,7 @@ def test_create_cloud_vm_cluster_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_cloud_vm_cluster(**mock_args) @@ -2819,10 +3662,14 @@ def test_create_cloud_vm_cluster_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/cloudVmClusters" + % client.transport._host, + args[1], + ) -def test_create_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): +def test_create_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2833,23 +3680,25 @@ def test_create_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_cloud_vm_cluster( oracledatabase.CreateCloudVmClusterRequest(), - parent='parent_value', - cloud_vm_cluster=vm_cluster.CloudVmCluster(name='name_value'), - cloud_vm_cluster_id='cloud_vm_cluster_id_value', + parent="parent_value", + cloud_vm_cluster=vm_cluster.CloudVmCluster(name="name_value"), + cloud_vm_cluster_id="cloud_vm_cluster_id_value", ) def test_create_cloud_vm_cluster_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.DeleteCloudVmClusterRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteCloudVmClusterRequest, + dict, + ], +) def test_delete_cloud_vm_cluster_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2857,26 +3706,29 @@ def test_delete_cloud_vm_cluster_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) 
# Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_cloud_vm_cluster(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_delete_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2891,12 +3743,19 @@ def test_delete_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_cloud_vm_cluster in client._transport._wrapped_methods + assert ( + client._transport.delete_cloud_vm_cluster + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_cloud_vm_cluster] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_cloud_vm_cluster + ] = mock_rpc request = {} client.delete_cloud_vm_cluster(request) @@ -2915,57 +3774,67 @@ def test_delete_cloud_vm_cluster_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_cloud_vm_cluster_rest_required_fields(request_type=oracledatabase.DeleteCloudVmClusterRequest): +def test_delete_cloud_vm_cluster_rest_required_fields( + request_type=oracledatabase.DeleteCloudVmClusterRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cloud_vm_cluster._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("force", "request_id", )) + assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -2973,40 +3842,58 @@ def test_delete_cloud_vm_cluster_rest_required_fields(request_type=oracledatabas response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_cloud_vm_cluster(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_cloud_vm_cluster_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_cloud_vm_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force", "requestId", )) & set(("name", ))) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_cloud_vm_cluster_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), 
"request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_delete_cloud_vm_cluster") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_vm_cluster") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_delete_cloud_vm_cluster" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_delete_cloud_vm_cluster" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.DeleteCloudVmClusterRequest.pb(oracledatabase.DeleteCloudVmClusterRequest()) + pb_message = oracledatabase.DeleteCloudVmClusterRequest.pb( + oracledatabase.DeleteCloudVmClusterRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3017,34 +3904,48 @@ def test_delete_cloud_vm_cluster_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.DeleteCloudVmClusterRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_cloud_vm_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_cloud_vm_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def 
test_delete_cloud_vm_cluster_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.DeleteCloudVmClusterRequest): +def test_delete_cloud_vm_cluster_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.DeleteCloudVmClusterRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -3060,16 +3961,18 @@ def test_delete_cloud_vm_cluster_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -3077,7 +3980,7 @@ def test_delete_cloud_vm_cluster_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_cloud_vm_cluster(**mock_args) @@ -3086,10 +3989,14 @@ def test_delete_cloud_vm_cluster_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/cloudVmClusters/*}" + % client.transport._host, + args[1], + ) -def test_delete_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): +def test_delete_cloud_vm_cluster_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3100,21 +4007,23 @@ def test_delete_cloud_vm_cluster_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_cloud_vm_cluster( oracledatabase.DeleteCloudVmClusterRequest(), - name='name_value', + name="name_value", ) def test_delete_cloud_vm_cluster_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListEntitlementsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListEntitlementsRequest, + dict, + ], +) def test_list_entitlements_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3122,14 +4031,14 @@ def test_list_entitlements_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListEntitlementsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -3139,13 +4048,14 @@ def test_list_entitlements_rest(request_type): return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_entitlements(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListEntitlementsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_entitlements_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3165,8 +4075,12 @@ def test_list_entitlements_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_entitlements] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_entitlements + ] = mock_rpc request = {} client.list_entitlements(request) @@ -3181,57 +4095,67 @@ def test_list_entitlements_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_entitlements_rest_required_fields(request_type=oracledatabase.ListEntitlementsRequest): +def test_list_entitlements_rest_required_fields( + request_type=oracledatabase.ListEntitlementsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entitlements._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entitlements._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_entitlements._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entitlements._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListEntitlementsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -3242,39 +4166,56 @@ def test_list_entitlements_rest_required_fields(request_type=oracledatabase.List return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_entitlements(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_entitlements_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_entitlements._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_entitlements_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_entitlements") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_entitlements") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_entitlements" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_entitlements" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListEntitlementsRequest.pb(oracledatabase.ListEntitlementsRequest()) + pb_message = oracledatabase.ListEntitlementsRequest.pb( + oracledatabase.ListEntitlementsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3285,34 +4226,46 @@ def test_list_entitlements_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListEntitlementsResponse.to_json(oracledatabase.ListEntitlementsResponse()) + req.return_value._content = oracledatabase.ListEntitlementsResponse.to_json( + oracledatabase.ListEntitlementsResponse() + ) request = oracledatabase.ListEntitlementsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListEntitlementsResponse() - client.list_entitlements(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_entitlements( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_entitlements_rest_bad_request(transport: str = 'rest', 
request_type=oracledatabase.ListEntitlementsRequest): +def test_list_entitlements_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListEntitlementsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -3328,16 +4281,16 @@ def test_list_entitlements_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListEntitlementsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -3347,7 +4300,7 @@ def test_list_entitlements_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListEntitlementsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_entitlements(**mock_args) @@ -3356,10 +4309,14 @@ def test_list_entitlements_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/entitlements" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/entitlements" + % client.transport._host, + args[1], + ) -def test_list_entitlements_rest_flattened_error(transport: str = 'rest'): +def test_list_entitlements_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3370,20 +4327,20 @@ def test_list_entitlements_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_entitlements( oracledatabase.ListEntitlementsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_entitlements_rest_pager(transport: str = 'rest'): +def test_list_entitlements_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http 
request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListEntitlementsResponse( @@ -3392,17 +4349,17 @@ def test_list_entitlements_rest_pager(transport: str = 'rest'): entitlement.Entitlement(), entitlement.Entitlement(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListEntitlementsResponse( entitlements=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListEntitlementsResponse( entitlements=[ entitlement.Entitlement(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListEntitlementsResponse( entitlements=[ @@ -3415,31 +4372,35 @@ def test_list_entitlements_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListEntitlementsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListEntitlementsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_entitlements(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, entitlement.Entitlement) - for i in results) + assert all(isinstance(i, entitlement.Entitlement) for i in results) pages = 
list(client.list_entitlements(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListDbServersRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbServersRequest, + dict, + ], +) def test_list_db_servers_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3447,14 +4408,16 @@ def test_list_db_servers_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbServersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -3464,13 +4427,14 @@ def test_list_db_servers_rest(request_type): return_value = oracledatabase.ListDbServersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_servers(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDbServersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_db_servers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3490,7 +4454,9 @@ def test_list_db_servers_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_db_servers] = mock_rpc request = {} @@ -3506,57 +4472,67 @@ def test_list_db_servers_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_db_servers_rest_required_fields(request_type=oracledatabase.ListDbServersRequest): +def test_list_db_servers_rest_required_fields( + request_type=oracledatabase.ListDbServersRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_servers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_servers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + 
jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_servers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_servers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbServersResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -3567,39 +4543,56 @@ def test_list_db_servers_rest_required_fields(request_type=oracledatabase.ListDb return_value = oracledatabase.ListDbServersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_servers(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_db_servers_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_db_servers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_db_servers_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_db_servers") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_db_servers") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_servers" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_servers" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListDbServersRequest.pb(oracledatabase.ListDbServersRequest()) + pb_message = oracledatabase.ListDbServersRequest.pb( + oracledatabase.ListDbServersRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3610,34 +4603,48 @@ def test_list_db_servers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListDbServersResponse.to_json(oracledatabase.ListDbServersResponse()) + req.return_value._content = oracledatabase.ListDbServersResponse.to_json( + oracledatabase.ListDbServersResponse() + ) request = oracledatabase.ListDbServersRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListDbServersResponse() - client.list_db_servers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_db_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_db_servers_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListDbServersRequest): +def 
test_list_db_servers_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbServersRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + request_init = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -3653,16 +4660,18 @@ def test_list_db_servers_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListDbServersResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -3672,7 +4681,7 @@ def test_list_db_servers_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListDbServersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_db_servers(**mock_args) @@ -3681,10 +4690,14 @@ def test_list_db_servers_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/cloudExadataInfrastructures/*}/dbServers" + % client.transport._host, + args[1], + ) -def test_list_db_servers_rest_flattened_error(transport: str = 'rest'): +def test_list_db_servers_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3695,20 +4708,20 @@ def test_list_db_servers_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_db_servers( oracledatabase.ListDbServersRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_db_servers_rest_pager(transport: str = 'rest'): +def test_list_db_servers_rest_pager(transport: str = "rest"): client = 
OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListDbServersResponse( @@ -3717,17 +4730,17 @@ def test_list_db_servers_rest_pager(transport: str = 'rest'): db_server.DbServer(), db_server.DbServer(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListDbServersResponse( db_servers=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListDbServersResponse( db_servers=[ db_server.DbServer(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListDbServersResponse( db_servers=[ @@ -3740,31 +4753,37 @@ def test_list_db_servers_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListDbServersResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListDbServersResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudExadataInfrastructures/sample3" + } pager = client.list_db_servers(request=sample_request) results = list(pager) assert len(results) == 6 - assert 
all(isinstance(i, db_server.DbServer) - for i in results) + assert all(isinstance(i, db_server.DbServer) for i in results) pages = list(client.list_db_servers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListDbNodesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbNodesRequest, + dict, + ], +) def test_list_db_nodes_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3772,14 +4791,16 @@ def test_list_db_nodes_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbNodesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -3789,13 +4810,14 @@ def test_list_db_nodes_rest(request_type): return_value = oracledatabase.ListDbNodesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_nodes(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDbNodesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_db_nodes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3815,7 +4837,9 @@ def test_list_db_nodes_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_db_nodes] = mock_rpc request = {} @@ -3831,57 +4855,67 @@ def test_list_db_nodes_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_db_nodes_rest_required_fields(request_type=oracledatabase.ListDbNodesRequest): +def test_list_db_nodes_rest_required_fields( + request_type=oracledatabase.ListDbNodesRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_nodes._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_nodes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_nodes._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_nodes._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbNodesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -3892,39 +4926,56 @@ def test_list_db_nodes_rest_required_fields(request_type=oracledatabase.ListDbNo return_value = oracledatabase.ListDbNodesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_nodes(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_db_nodes_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_db_nodes._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_db_nodes_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), 
"request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_db_nodes") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_db_nodes") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_nodes" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_nodes" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListDbNodesRequest.pb(oracledatabase.ListDbNodesRequest()) + pb_message = oracledatabase.ListDbNodesRequest.pb( + oracledatabase.ListDbNodesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3935,34 +4986,48 @@ def test_list_db_nodes_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListDbNodesResponse.to_json(oracledatabase.ListDbNodesResponse()) + req.return_value._content = oracledatabase.ListDbNodesResponse.to_json( + oracledatabase.ListDbNodesResponse() + ) request = oracledatabase.ListDbNodesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListDbNodesResponse() - client.list_db_nodes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_db_nodes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_db_nodes_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListDbNodesRequest): +def test_list_db_nodes_rest_bad_request( + transport: str = "rest", 
request_type=oracledatabase.ListDbNodesRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + request_init = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -3978,16 +5043,18 @@ def test_list_db_nodes_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListDbNodesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -3997,7 +5064,7 @@ def test_list_db_nodes_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListDbNodesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_db_nodes(**mock_args) @@ -4006,10 +5073,14 @@ def test_list_db_nodes_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/cloudVmClusters/*}/dbNodes" + % client.transport._host, + args[1], + ) -def test_list_db_nodes_rest_flattened_error(transport: str = 'rest'): +def test_list_db_nodes_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4020,20 +5091,20 @@ def test_list_db_nodes_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_db_nodes( oracledatabase.ListDbNodesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_db_nodes_rest_pager(transport: str = 'rest'): +def test_list_db_nodes_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), 
transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListDbNodesResponse( @@ -4042,17 +5113,17 @@ def test_list_db_nodes_rest_pager(transport: str = 'rest'): db_node.DbNode(), db_node.DbNode(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListDbNodesResponse( db_nodes=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListDbNodesResponse( db_nodes=[ db_node.DbNode(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListDbNodesResponse( db_nodes=[ @@ -4065,31 +5136,37 @@ def test_list_db_nodes_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListDbNodesResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListDbNodesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2/cloudVmClusters/sample3'} + sample_request = { + "parent": "projects/sample1/locations/sample2/cloudVmClusters/sample3" + } pager = client.list_db_nodes(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, db_node.DbNode) - for i in results) + assert all(isinstance(i, db_node.DbNode) for i in results) pages = 
list(client.list_db_nodes(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListGiVersionsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListGiVersionsRequest, + dict, + ], +) def test_list_gi_versions_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4097,14 +5174,14 @@ def test_list_gi_versions_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListGiVersionsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -4114,13 +5191,14 @@ def test_list_gi_versions_rest(request_type): return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_gi_versions(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListGiVersionsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_gi_versions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4140,8 +5218,12 @@ def test_list_gi_versions_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_gi_versions] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_gi_versions + ] = mock_rpc request = {} client.list_gi_versions(request) @@ -4156,57 +5238,67 @@ def test_list_gi_versions_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_gi_versions_rest_required_fields(request_type=oracledatabase.ListGiVersionsRequest): +def test_list_gi_versions_rest_required_fields( + request_type=oracledatabase.ListGiVersionsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_gi_versions._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gi_versions._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_gi_versions._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gi_versions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListGiVersionsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -4217,39 +5309,56 @@ def test_list_gi_versions_rest_required_fields(request_type=oracledatabase.ListG return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_gi_versions(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_gi_versions_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_gi_versions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_gi_versions_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_gi_versions") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_gi_versions") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_gi_versions" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_gi_versions" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListGiVersionsRequest.pb(oracledatabase.ListGiVersionsRequest()) + pb_message = oracledatabase.ListGiVersionsRequest.pb( + oracledatabase.ListGiVersionsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4260,34 +5369,46 @@ def test_list_gi_versions_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListGiVersionsResponse.to_json(oracledatabase.ListGiVersionsResponse()) + req.return_value._content = oracledatabase.ListGiVersionsResponse.to_json( + oracledatabase.ListGiVersionsResponse() + ) request = oracledatabase.ListGiVersionsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListGiVersionsResponse() - client.list_gi_versions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_gi_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_gi_versions_rest_bad_request(transport: str = 'rest', 
request_type=oracledatabase.ListGiVersionsRequest): +def test_list_gi_versions_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListGiVersionsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -4303,16 +5424,16 @@ def test_list_gi_versions_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListGiVersionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -4322,7 +5443,7 @@ def test_list_gi_versions_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListGiVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_gi_versions(**mock_args) @@ -4331,10 +5452,13 @@ def test_list_gi_versions_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/giVersions" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/giVersions" % client.transport._host, + args[1], + ) -def test_list_gi_versions_rest_flattened_error(transport: str = 'rest'): +def test_list_gi_versions_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4345,20 +5469,20 @@ def test_list_gi_versions_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_gi_versions( oracledatabase.ListGiVersionsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_gi_versions_rest_pager(transport: str = 'rest'): +def test_list_gi_versions_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the 
method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListGiVersionsResponse( @@ -4367,17 +5491,17 @@ def test_list_gi_versions_rest_pager(transport: str = 'rest'): gi_version.GiVersion(), gi_version.GiVersion(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListGiVersionsResponse( gi_versions=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListGiVersionsResponse( gi_versions=[ gi_version.GiVersion(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListGiVersionsResponse( gi_versions=[ @@ -4390,31 +5514,35 @@ def test_list_gi_versions_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListGiVersionsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListGiVersionsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_gi_versions(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, gi_version.GiVersion) - for i in results) + assert all(isinstance(i, gi_version.GiVersion) for i in results) pages = list(client.list_gi_versions(request=sample_request).pages) - for page_, 
token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListDbSystemShapesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListDbSystemShapesRequest, + dict, + ], +) def test_list_db_system_shapes_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4422,14 +5550,14 @@ def test_list_db_system_shapes_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbSystemShapesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -4439,13 +5567,14 @@ def test_list_db_system_shapes_rest(request_type): return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_system_shapes(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDbSystemShapesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_db_system_shapes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4461,12 +5590,19 @@ def test_list_db_system_shapes_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_db_system_shapes in client._transport._wrapped_methods + assert ( + client._transport.list_db_system_shapes + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_db_system_shapes] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_db_system_shapes + ] = mock_rpc request = {} client.list_db_system_shapes(request) @@ -4481,57 +5617,67 @@ def test_list_db_system_shapes_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_db_system_shapes_rest_required_fields(request_type=oracledatabase.ListDbSystemShapesRequest): +def test_list_db_system_shapes_rest_required_fields( + request_type=oracledatabase.ListDbSystemShapesRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_system_shapes._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_system_shapes._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_db_system_shapes._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_db_system_shapes._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbSystemShapesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -4542,39 +5688,56 @@ def test_list_db_system_shapes_rest_required_fields(request_type=oracledatabase. 
return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_db_system_shapes(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_db_system_shapes_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_db_system_shapes._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_db_system_shapes_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_db_system_shapes") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_db_system_shapes") as pre: + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_db_system_shapes" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_db_system_shapes" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListDbSystemShapesRequest.pb(oracledatabase.ListDbSystemShapesRequest()) + pb_message = oracledatabase.ListDbSystemShapesRequest.pb( + oracledatabase.ListDbSystemShapesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4585,34 +5748,46 @@ def test_list_db_system_shapes_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListDbSystemShapesResponse.to_json(oracledatabase.ListDbSystemShapesResponse()) + req.return_value._content = oracledatabase.ListDbSystemShapesResponse.to_json( + oracledatabase.ListDbSystemShapesResponse() + ) request = oracledatabase.ListDbSystemShapesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListDbSystemShapesResponse() - client.list_db_system_shapes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_db_system_shapes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_db_system_shapes_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListDbSystemShapesRequest): +def test_list_db_system_shapes_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListDbSystemShapesRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy 
transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -4628,16 +5803,16 @@ def test_list_db_system_shapes_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListDbSystemShapesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -4647,7 +5822,7 @@ def test_list_db_system_shapes_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListDbSystemShapesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_db_system_shapes(**mock_args) @@ -4656,10 +5831,14 @@ def test_list_db_system_shapes_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dbSystemShapes" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dbSystemShapes" + % client.transport._host, + args[1], + ) -def test_list_db_system_shapes_rest_flattened_error(transport: str = 'rest'): +def test_list_db_system_shapes_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4670,20 +5849,20 @@ def test_list_db_system_shapes_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_db_system_shapes( oracledatabase.ListDbSystemShapesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_db_system_shapes_rest_pager(transport: str = 'rest'): +def test_list_db_system_shapes_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListDbSystemShapesResponse( @@ -4692,17 +5871,17 @@ def test_list_db_system_shapes_rest_pager(transport: str = 'rest'): db_system_shape.DbSystemShape(), db_system_shape.DbSystemShape(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListDbSystemShapesResponse( db_system_shapes=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListDbSystemShapesResponse( db_system_shapes=[ db_system_shape.DbSystemShape(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListDbSystemShapesResponse( db_system_shapes=[ @@ -4715,31 +5894,35 @@ def test_list_db_system_shapes_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListDbSystemShapesResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListDbSystemShapesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_db_system_shapes(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, db_system_shape.DbSystemShape) - for i in results) + assert all(isinstance(i, db_system_shape.DbSystemShape) for i in results) pages = list(client.list_db_system_shapes(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", 
""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListAutonomousDatabasesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabasesRequest, + dict, + ], +) def test_list_autonomous_databases_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4747,14 +5930,14 @@ def test_list_autonomous_databases_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabasesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -4764,13 +5947,14 @@ def test_list_autonomous_databases_rest(request_type): return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_databases(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListAutonomousDatabasesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_autonomous_databases_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4786,12 +5970,19 @@ def test_list_autonomous_databases_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_autonomous_databases in client._transport._wrapped_methods + assert ( + client._transport.list_autonomous_databases + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_autonomous_databases] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_databases + ] = mock_rpc request = {} client.list_autonomous_databases(request) @@ -4806,57 +5997,69 @@ def test_list_autonomous_databases_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_autonomous_databases_rest_required_fields(request_type=oracledatabase.ListAutonomousDatabasesRequest): +def test_list_autonomous_databases_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabasesRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_databases._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_databases._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_databases._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_databases._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabasesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -4864,42 +6067,63 @@ def test_list_autonomous_databases_rest_required_fields(request_type=oracledatab response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabasesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_databases(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_autonomous_databases_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_autonomous_databases._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_autonomous_databases_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if 
null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_databases") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_databases") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_autonomous_databases" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_databases" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListAutonomousDatabasesRequest.pb(oracledatabase.ListAutonomousDatabasesRequest()) + pb_message = oracledatabase.ListAutonomousDatabasesRequest.pb( + oracledatabase.ListAutonomousDatabasesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4910,34 +6134,48 @@ def test_list_autonomous_databases_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListAutonomousDatabasesResponse.to_json(oracledatabase.ListAutonomousDatabasesResponse()) + req.return_value._content = ( + oracledatabase.ListAutonomousDatabasesResponse.to_json( + oracledatabase.ListAutonomousDatabasesResponse() + ) + ) request = oracledatabase.ListAutonomousDatabasesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = 
oracledatabase.ListAutonomousDatabasesResponse() - client.list_autonomous_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_autonomous_databases( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_autonomous_databases_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDatabasesRequest): +def test_list_autonomous_databases_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListAutonomousDatabasesRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -4953,16 +6191,16 @@ def test_list_autonomous_databases_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDatabasesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -4972,7 +6210,7 @@ def test_list_autonomous_databases_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListAutonomousDatabasesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_autonomous_databases(**mock_args) @@ -4981,10 +6219,14 @@ def test_list_autonomous_databases_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" + % client.transport._host, + args[1], + ) -def test_list_autonomous_databases_rest_flattened_error(transport: str = 'rest'): +def test_list_autonomous_databases_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4995,20 +6237,20 @@ def test_list_autonomous_databases_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.list_autonomous_databases( oracledatabase.ListAutonomousDatabasesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_autonomous_databases_rest_pager(transport: str = 'rest'): +def test_list_autonomous_databases_rest_pager(transport: str = "rest"): client = 
OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListAutonomousDatabasesResponse( @@ -5017,17 +6259,17 @@ def test_list_autonomous_databases_rest_pager(transport: str = 'rest'): autonomous_database.AutonomousDatabase(), autonomous_database.AutonomousDatabase(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListAutonomousDatabasesResponse( autonomous_databases=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListAutonomousDatabasesResponse( autonomous_databases=[ autonomous_database.AutonomousDatabase(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListAutonomousDatabasesResponse( autonomous_databases=[ @@ -5040,31 +6282,37 @@ def test_list_autonomous_databases_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListAutonomousDatabasesResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListAutonomousDatabasesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = 
client.list_autonomous_databases(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, autonomous_database.AutonomousDatabase) - for i in results) + assert all( + isinstance(i, autonomous_database.AutonomousDatabase) for i in results + ) pages = list(client.list_autonomous_databases(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.GetAutonomousDatabaseRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GetAutonomousDatabaseRequest, + dict, + ], +) def test_get_autonomous_database_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5072,20 +6320,22 @@ def test_get_autonomous_database_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = autonomous_database.AutonomousDatabase( - name='name_value', - database='database_value', - display_name='display_name_value', - entitlement_id='entitlement_id_value', - admin_password='admin_password_value', - network='network_value', - cidr='cidr_value', + name="name_value", + database="database_value", + display_name="display_name_value", + entitlement_id="entitlement_id_value", + admin_password="admin_password_value", + network="network_value", + cidr="cidr_value", ) # Wrap the value into a proper Response obj @@ -5095,19 +6345,20 @@ def test_get_autonomous_database_rest(request_type): return_value = autonomous_database.AutonomousDatabase.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_autonomous_database(request) # Establish that the response is the type that we expect. 
assert isinstance(response, autonomous_database.AutonomousDatabase) - assert response.name == 'name_value' - assert response.database == 'database_value' - assert response.display_name == 'display_name_value' - assert response.entitlement_id == 'entitlement_id_value' - assert response.admin_password == 'admin_password_value' - assert response.network == 'network_value' - assert response.cidr == 'cidr_value' + assert response.name == "name_value" + assert response.database == "database_value" + assert response.display_name == "display_name_value" + assert response.entitlement_id == "entitlement_id_value" + assert response.admin_password == "admin_password_value" + assert response.network == "network_value" + assert response.cidr == "cidr_value" + def test_get_autonomous_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5123,12 +6374,19 @@ def test_get_autonomous_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_autonomous_database in client._transport._wrapped_methods + assert ( + client._transport.get_autonomous_database + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_autonomous_database] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_autonomous_database + ] = mock_rpc request = {} client.get_autonomous_database(request) @@ -5143,55 +6401,60 @@ def test_get_autonomous_database_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_autonomous_database_rest_required_fields(request_type=oracledatabase.GetAutonomousDatabaseRequest): +def test_get_autonomous_database_rest_required_fields( + request_type=oracledatabase.GetAutonomousDatabaseRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == 
"name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = autonomous_database.AutonomousDatabase() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -5202,39 +6465,48 @@ def test_get_autonomous_database_rest_required_fields(request_type=oracledatabas return_value = autonomous_database.AutonomousDatabase.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_autonomous_database(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_autonomous_database_rest_unset_required_fields(): - transport = 
transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_autonomous_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_autonomous_database_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_get_autonomous_database") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_get_autonomous_database") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_get_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_get_autonomous_database" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.GetAutonomousDatabaseRequest.pb(oracledatabase.GetAutonomousDatabaseRequest()) + pb_message = oracledatabase.GetAutonomousDatabaseRequest.pb( + oracledatabase.GetAutonomousDatabaseRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5245,34 +6517,48 @@ def 
test_get_autonomous_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = autonomous_database.AutonomousDatabase.to_json(autonomous_database.AutonomousDatabase()) + req.return_value._content = autonomous_database.AutonomousDatabase.to_json( + autonomous_database.AutonomousDatabase() + ) request = oracledatabase.GetAutonomousDatabaseRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = autonomous_database.AutonomousDatabase() - client.get_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GetAutonomousDatabaseRequest): +def test_get_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.GetAutonomousDatabaseRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -5288,16 +6574,18 @@ def test_get_autonomous_database_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = autonomous_database.AutonomousDatabase() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -5307,7 +6595,7 @@ def test_get_autonomous_database_rest_flattened(): # Convert return value to protobuf type return_value = autonomous_database.AutonomousDatabase.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_autonomous_database(**mock_args) @@ -5316,10 +6604,14 @@ def test_get_autonomous_database_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" + % client.transport._host, + args[1], + ) -def test_get_autonomous_database_rest_flattened_error(transport: str = 'rest'): +def test_get_autonomous_database_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5330,21 +6622,23 @@ def test_get_autonomous_database_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_autonomous_database( oracledatabase.GetAutonomousDatabaseRequest(), - name='name_value', + name="name_value", ) def test_get_autonomous_database_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.CreateAutonomousDatabaseRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.CreateAutonomousDatabaseRequest, + dict, + ], +) def test_create_autonomous_database_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5352,14 +6646,143 @@ def test_create_autonomous_database_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["autonomous_database"] = {'name': 'name_value', 'database': 'database_value', 'display_name': 'display_name_value', 'entitlement_id': 'entitlement_id_value', 'admin_password': 'admin_password_value', 'properties': {'ocid': 'ocid_value', 'compute_count': 0.1413, 'cpu_core_count': 1496, 'data_storage_size_tb': 2109, 'data_storage_size_gb': 2096, 
'db_workload': 1, 'db_edition': 1, 'character_set': 'character_set_value', 'n_character_set': 'n_character_set_value', 'private_endpoint_ip': 'private_endpoint_ip_value', 'private_endpoint_label': 'private_endpoint_label_value', 'db_version': 'db_version_value', 'is_auto_scaling_enabled': True, 'is_storage_auto_scaling_enabled': True, 'license_type': 1, 'customer_contacts': [{'email': 'email_value'}], 'secret_id': 'secret_id_value', 'vault_id': 'vault_id_value', 'maintenance_schedule_type': 1, 'mtls_connection_required': True, 'backup_retention_period_days': 2975, 'actual_used_data_storage_size_tb': 0.3366, 'allocated_storage_size_tb': 0.2636, 'apex_details': {'apex_version': 'apex_version_value', 'ords_version': 'ords_version_value'}, 'are_primary_allowlisted_ips_used': True, 'lifecycle_details': 'lifecycle_details_value', 'state': 1, 'autonomous_container_database_id': 'autonomous_container_database_id_value', 'available_upgrade_versions': ['available_upgrade_versions_value1', 'available_upgrade_versions_value2'], 'connection_strings': {'all_connection_strings': {'high': 'high_value', 'low': 'low_value', 'medium': 'medium_value'}, 'dedicated': 'dedicated_value', 'high': 'high_value', 'low': 'low_value', 'medium': 'medium_value', 'profiles': [{'consumer_group': 1, 'display_name': 'display_name_value', 'host_format': 1, 'is_regional': True, 'protocol': 1, 'session_mode': 1, 'syntax_format': 1, 'tls_authentication': 1, 'value': 'value_value'}]}, 'connection_urls': {'apex_uri': 'apex_uri_value', 'database_transforms_uri': 'database_transforms_uri_value', 'graph_studio_uri': 'graph_studio_uri_value', 'machine_learning_notebook_uri': 'machine_learning_notebook_uri_value', 'machine_learning_user_management_uri': 'machine_learning_user_management_uri_value', 'mongo_db_uri': 'mongo_db_uri_value', 'ords_uri': 'ords_uri_value', 'sql_dev_web_uri': 'sql_dev_web_uri_value'}, 'failed_data_recovery_duration': {'seconds': 751, 'nanos': 543}, 'memory_table_gbs': 1691, 
'is_local_data_guard_enabled': True, 'local_adg_auto_failover_max_data_loss_limit': 4513, 'local_standby_db': {'lag_time_duration': {}, 'lifecycle_details': 'lifecycle_details_value', 'state': 1, 'data_guard_role_changed_time': {'seconds': 751, 'nanos': 543}, 'disaster_recovery_role_changed_time': {}}, 'memory_per_oracle_compute_unit_gbs': 3626, 'local_disaster_recovery_type': 1, 'data_safe_state': 1, 'database_management_state': 1, 'open_mode': 1, 'operations_insights_state': 1, 'peer_db_ids': ['peer_db_ids_value1', 'peer_db_ids_value2'], 'permission_level': 1, 'private_endpoint': 'private_endpoint_value', 'refreshable_mode': 1, 'refreshable_state': 1, 'role': 1, 'scheduled_operation_details': [{'day_of_week': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'stop_time': {}}], 'sql_web_developer_url': 'sql_web_developer_url_value', 'supported_clone_regions': ['supported_clone_regions_value1', 'supported_clone_regions_value2'], 'used_data_storage_size_tbs': 2752, 'oci_url': 'oci_url_value', 'total_auto_backup_storage_size_gbs': 0.36100000000000004, 'next_long_term_backup_time': {}, 'maintenance_begin_time': {}, 'maintenance_end_time': {}}, 'labels': {}, 'network': 'network_value', 'cidr': 'cidr_value', 'create_time': {}} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["autonomous_database"] = { + "name": "name_value", + "database": "database_value", + "display_name": "display_name_value", + "entitlement_id": "entitlement_id_value", + "admin_password": "admin_password_value", + "properties": { + "ocid": "ocid_value", + "compute_count": 0.1413, + "cpu_core_count": 1496, + "data_storage_size_tb": 2109, + "data_storage_size_gb": 2096, + "db_workload": 1, + "db_edition": 1, + "character_set": "character_set_value", + "n_character_set": "n_character_set_value", + "private_endpoint_ip": "private_endpoint_ip_value", + "private_endpoint_label": "private_endpoint_label_value", + "db_version": "db_version_value", 
+ "is_auto_scaling_enabled": True, + "is_storage_auto_scaling_enabled": True, + "license_type": 1, + "customer_contacts": [{"email": "email_value"}], + "secret_id": "secret_id_value", + "vault_id": "vault_id_value", + "maintenance_schedule_type": 1, + "mtls_connection_required": True, + "backup_retention_period_days": 2975, + "actual_used_data_storage_size_tb": 0.3366, + "allocated_storage_size_tb": 0.2636, + "apex_details": { + "apex_version": "apex_version_value", + "ords_version": "ords_version_value", + }, + "are_primary_allowlisted_ips_used": True, + "lifecycle_details": "lifecycle_details_value", + "state": 1, + "autonomous_container_database_id": "autonomous_container_database_id_value", + "available_upgrade_versions": [ + "available_upgrade_versions_value1", + "available_upgrade_versions_value2", + ], + "connection_strings": { + "all_connection_strings": { + "high": "high_value", + "low": "low_value", + "medium": "medium_value", + }, + "dedicated": "dedicated_value", + "high": "high_value", + "low": "low_value", + "medium": "medium_value", + "profiles": [ + { + "consumer_group": 1, + "display_name": "display_name_value", + "host_format": 1, + "is_regional": True, + "protocol": 1, + "session_mode": 1, + "syntax_format": 1, + "tls_authentication": 1, + "value": "value_value", + } + ], + }, + "connection_urls": { + "apex_uri": "apex_uri_value", + "database_transforms_uri": "database_transforms_uri_value", + "graph_studio_uri": "graph_studio_uri_value", + "machine_learning_notebook_uri": "machine_learning_notebook_uri_value", + "machine_learning_user_management_uri": "machine_learning_user_management_uri_value", + "mongo_db_uri": "mongo_db_uri_value", + "ords_uri": "ords_uri_value", + "sql_dev_web_uri": "sql_dev_web_uri_value", + }, + "failed_data_recovery_duration": {"seconds": 751, "nanos": 543}, + "memory_table_gbs": 1691, + "is_local_data_guard_enabled": True, + "local_adg_auto_failover_max_data_loss_limit": 4513, + "local_standby_db": { + 
"lag_time_duration": {}, + "lifecycle_details": "lifecycle_details_value", + "state": 1, + "data_guard_role_changed_time": {"seconds": 751, "nanos": 543}, + "disaster_recovery_role_changed_time": {}, + }, + "memory_per_oracle_compute_unit_gbs": 3626, + "local_disaster_recovery_type": 1, + "data_safe_state": 1, + "database_management_state": 1, + "open_mode": 1, + "operations_insights_state": 1, + "peer_db_ids": ["peer_db_ids_value1", "peer_db_ids_value2"], + "permission_level": 1, + "private_endpoint": "private_endpoint_value", + "refreshable_mode": 1, + "refreshable_state": 1, + "role": 1, + "scheduled_operation_details": [ + { + "day_of_week": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "stop_time": {}, + } + ], + "sql_web_developer_url": "sql_web_developer_url_value", + "supported_clone_regions": [ + "supported_clone_regions_value1", + "supported_clone_regions_value2", + ], + "used_data_storage_size_tbs": 2752, + "oci_url": "oci_url_value", + "total_auto_backup_storage_size_gbs": 0.36100000000000004, + "next_long_term_backup_time": {}, + "maintenance_begin_time": {}, + "maintenance_end_time": {}, + }, + "labels": {}, + "network": "network_value", + "cidr": "cidr_value", + "create_time": {}, + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = oracledatabase.CreateAutonomousDatabaseRequest.meta.fields["autonomous_database"] + test_field = oracledatabase.CreateAutonomousDatabaseRequest.meta.fields[ + "autonomous_database" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -5373,7 +6796,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -5387,7 +6810,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["autonomous_database"].items(): # pragma: NO COVER + for field, value in request_init["autonomous_database"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -5402,12 +6825,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO 
COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -5420,22 +6847,23 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_autonomous_database(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_create_autonomous_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5450,12 +6878,19 @@ def test_create_autonomous_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_autonomous_database in client._transport._wrapped_methods + assert ( + client._transport.create_autonomous_database + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.create_autonomous_database] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_autonomous_database + ] = mock_rpc request = {} client.create_autonomous_database(request) @@ -5474,7 +6909,9 @@ def test_create_autonomous_database_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_autonomous_database_rest_required_fields(request_type=oracledatabase.CreateAutonomousDatabaseRequest): +def test_create_autonomous_database_rest_required_fields( + request_type=oracledatabase.CreateAutonomousDatabaseRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} @@ -5482,65 +6919,76 @@ def test_create_autonomous_database_rest_required_fields(request_type=oracledata request_init["autonomous_database_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "autonomousDatabaseId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "autonomousDatabaseId" in jsonified_request - assert jsonified_request["autonomousDatabaseId"] == request_init["autonomous_database_id"] + assert ( + jsonified_request["autonomousDatabaseId"] + == 
request_init["autonomous_database_id"] + ) - jsonified_request["parent"] = 'parent_value' - jsonified_request["autonomousDatabaseId"] = 'autonomous_database_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["autonomousDatabaseId"] = "autonomous_database_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autonomous_database._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("autonomous_database_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "autonomous_database_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "autonomousDatabaseId" in jsonified_request - assert jsonified_request["autonomousDatabaseId"] == 'autonomous_database_id_value' + assert jsonified_request["autonomousDatabaseId"] == "autonomous_database_id_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_autonomous_database(request) @@ -5550,34 +6998,60 @@ def test_create_autonomous_database_rest_required_fields(request_type=oracledata "autonomousDatabaseId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_autonomous_database_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_autonomous_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("autonomousDatabaseId", "requestId", )) & set(("parent", 
"autonomousDatabaseId", "autonomousDatabase", ))) + assert set(unset_fields) == ( + set( + ( + "autonomousDatabaseId", + "requestId", + ) + ) + & set( + ( + "parent", + "autonomousDatabaseId", + "autonomousDatabase", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_autonomous_database_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_create_autonomous_database") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_create_autonomous_database") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_create_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_create_autonomous_database" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.CreateAutonomousDatabaseRequest.pb(oracledatabase.CreateAutonomousDatabaseRequest()) + pb_message = oracledatabase.CreateAutonomousDatabaseRequest.pb( + oracledatabase.CreateAutonomousDatabaseRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5588,34 +7062,46 @@ def 
test_create_autonomous_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.CreateAutonomousDatabaseRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.CreateAutonomousDatabaseRequest): +def test_create_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.CreateAutonomousDatabaseRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -5631,18 +7117,20 @@ def test_create_autonomous_database_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - autonomous_database=gco_autonomous_database.AutonomousDatabase(name='name_value'), - autonomous_database_id='autonomous_database_id_value', + parent="parent_value", + autonomous_database=gco_autonomous_database.AutonomousDatabase( + name="name_value" + ), + autonomous_database_id="autonomous_database_id_value", ) mock_args.update(sample_request) @@ -5650,7 +7138,7 @@ def test_create_autonomous_database_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_autonomous_database(**mock_args) @@ -5659,10 +7147,14 @@ def test_create_autonomous_database_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabases" + % client.transport._host, + args[1], + ) -def test_create_autonomous_database_rest_flattened_error(transport: str = 'rest'): +def test_create_autonomous_database_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5673,23 +7165,27 @@ def test_create_autonomous_database_rest_flattened_error(transport: str = 'rest' with pytest.raises(ValueError): client.create_autonomous_database( oracledatabase.CreateAutonomousDatabaseRequest(), - parent='parent_value', - autonomous_database=gco_autonomous_database.AutonomousDatabase(name='name_value'), - autonomous_database_id='autonomous_database_id_value', + parent="parent_value", + autonomous_database=gco_autonomous_database.AutonomousDatabase( + name="name_value" + ), + autonomous_database_id="autonomous_database_id_value", ) def test_create_autonomous_database_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.DeleteAutonomousDatabaseRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.DeleteAutonomousDatabaseRequest, + dict, + ], +) def test_delete_autonomous_database_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5697,26 +7193,29 @@ def test_delete_autonomous_database_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": 
"projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_autonomous_database(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_delete_autonomous_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5731,12 +7230,19 @@ def test_delete_autonomous_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_autonomous_database in client._transport._wrapped_methods + assert ( + client._transport.delete_autonomous_database + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_autonomous_database] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_autonomous_database + ] = mock_rpc request = {} client.delete_autonomous_database(request) @@ -5755,57 +7261,62 @@ def test_delete_autonomous_database_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_autonomous_database_rest_required_fields(request_type=oracledatabase.DeleteAutonomousDatabaseRequest): +def test_delete_autonomous_database_rest_required_fields( + request_type=oracledatabase.DeleteAutonomousDatabaseRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autonomous_database._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id", )) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -5813,40 +7324,50 @@ def test_delete_autonomous_database_rest_required_fields(request_type=oracledata response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_autonomous_database(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_autonomous_database_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_autonomous_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_autonomous_database_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - 
mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_delete_autonomous_database") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_delete_autonomous_database") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_delete_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_delete_autonomous_database" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.DeleteAutonomousDatabaseRequest.pb(oracledatabase.DeleteAutonomousDatabaseRequest()) + pb_message = oracledatabase.DeleteAutonomousDatabaseRequest.pb( + oracledatabase.DeleteAutonomousDatabaseRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5857,34 +7378,48 @@ def test_delete_autonomous_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.DeleteAutonomousDatabaseRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() 
-def test_delete_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.DeleteAutonomousDatabaseRequest): +def test_delete_autonomous_database_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.DeleteAutonomousDatabaseRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -5900,16 +7435,18 @@ def test_delete_autonomous_database_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -5917,7 +7454,7 @@ def test_delete_autonomous_database_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_autonomous_database(**mock_args) @@ -5926,10 +7463,14 @@ def test_delete_autonomous_database_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}" + % client.transport._host, + args[1], + ) -def test_delete_autonomous_database_rest_flattened_error(transport: str = 'rest'): +def test_delete_autonomous_database_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5940,21 +7481,23 @@ def test_delete_autonomous_database_rest_flattened_error(transport: str = 'rest' with pytest.raises(ValueError): client.delete_autonomous_database( oracledatabase.DeleteAutonomousDatabaseRequest(), - name='name_value', + name="name_value", ) def test_delete_autonomous_database_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.RestoreAutonomousDatabaseRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.RestoreAutonomousDatabaseRequest, + dict, + ], +) def test_restore_autonomous_database_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5962,26 +7505,29 @@ def test_restore_autonomous_database_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.restore_autonomous_database(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" + def test_restore_autonomous_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5996,12 +7542,19 @@ def test_restore_autonomous_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.restore_autonomous_database in client._transport._wrapped_methods + assert ( + client._transport.restore_autonomous_database + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restore_autonomous_database] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.restore_autonomous_database + ] = mock_rpc request = {} client.restore_autonomous_database(request) @@ -6020,97 +7573,120 @@ def test_restore_autonomous_database_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_restore_autonomous_database_rest_required_fields(request_type=oracledatabase.RestoreAutonomousDatabaseRequest): +def test_restore_autonomous_database_rest_required_fields( + request_type=oracledatabase.RestoreAutonomousDatabaseRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_autonomous_database._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_autonomous_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' 
+ assert jsonified_request["name"] == "name_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.restore_autonomous_database(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_restore_autonomous_database_rest_unset_required_fields(): - 
transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.restore_autonomous_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "restoreTime", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "restoreTime", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_restore_autonomous_database_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_restore_autonomous_database") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_restore_autonomous_database") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_restore_autonomous_database" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_restore_autonomous_database" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.RestoreAutonomousDatabaseRequest.pb(oracledatabase.RestoreAutonomousDatabaseRequest()) + pb_message = 
oracledatabase.RestoreAutonomousDatabaseRequest.pb( + oracledatabase.RestoreAutonomousDatabaseRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6121,34 +7697,49 @@ def test_restore_autonomous_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) request = oracledatabase.RestoreAutonomousDatabaseRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.restore_autonomous_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.restore_autonomous_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_restore_autonomous_database_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.RestoreAutonomousDatabaseRequest): +def test_restore_autonomous_database_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.RestoreAutonomousDatabaseRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -6164,16 +7755,18 @@ def test_restore_autonomous_database_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", restore_time=timestamp_pb2.Timestamp(seconds=751), ) mock_args.update(sample_request) @@ -6182,7 +7775,7 @@ def test_restore_autonomous_database_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.restore_autonomous_database(**mock_args) @@ -6191,10 +7784,14 @@ def test_restore_autonomous_database_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:restore" + % client.transport._host, + args[1], + ) -def test_restore_autonomous_database_rest_flattened_error(transport: str = 'rest'): +def test_restore_autonomous_database_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6205,22 +7802,24 @@ def test_restore_autonomous_database_rest_flattened_error(transport: str = 'rest with pytest.raises(ValueError): client.restore_autonomous_database( oracledatabase.RestoreAutonomousDatabaseRequest(), - name='name_value', + name="name_value", restore_time=timestamp_pb2.Timestamp(seconds=751), ) def test_restore_autonomous_database_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.GenerateAutonomousDatabaseWalletRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.GenerateAutonomousDatabaseWalletRequest, + dict, + ], +) def test_generate_autonomous_database_wallet_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6228,30 +7827,35 @@ def test_generate_autonomous_database_wallet_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse( - archive_content=b'archive_content_blob', + archive_content=b"archive_content_blob", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(return_value) + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.generate_autonomous_database_wallet(request) # Establish that the response is the type that we expect. assert isinstance(response, oracledatabase.GenerateAutonomousDatabaseWalletResponse) - assert response.archive_content == b'archive_content_blob' + assert response.archive_content == b"archive_content_blob" + def test_generate_autonomous_database_wallet_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6267,12 +7871,19 @@ def test_generate_autonomous_database_wallet_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.generate_autonomous_database_wallet in client._transport._wrapped_methods + assert ( + client._transport.generate_autonomous_database_wallet + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.generate_autonomous_database_wallet] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_autonomous_database_wallet + ] = mock_rpc request = {} client.generate_autonomous_database_wallet(request) @@ -6287,7 +7898,9 @@ def test_generate_autonomous_database_wallet_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_generate_autonomous_database_wallet_rest_required_fields(request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest): +def test_generate_autonomous_database_wallet_rest_required_fields( + request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} @@ -6295,95 +7908,121 @@ def test_generate_autonomous_database_wallet_rest_required_fields(request_type=o request_init["password"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["password"] = 'password_value' + jsonified_request["name"] = "name_value" + jsonified_request["password"] = "password_value" - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_autonomous_database_wallet._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "password" in jsonified_request - assert jsonified_request["password"] == 'password_value' + assert jsonified_request["password"] == "password_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(return_value) + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.generate_autonomous_database_wallet(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_generate_autonomous_database_wallet_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.generate_autonomous_database_wallet._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "password", ))) + unset_fields = ( + transport.generate_autonomous_database_wallet._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "password", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_generate_autonomous_database_wallet_rest_interceptors(null_interceptor): transport = 
transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_generate_autonomous_database_wallet") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_generate_autonomous_database_wallet") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_generate_autonomous_database_wallet", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_generate_autonomous_database_wallet", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb(oracledatabase.GenerateAutonomousDatabaseWalletRequest()) + pb_message = oracledatabase.GenerateAutonomousDatabaseWalletRequest.pb( + oracledatabase.GenerateAutonomousDatabaseWalletRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6394,34 +8033,51 @@ def test_generate_autonomous_database_wallet_rest_interceptors(null_interceptor) req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.GenerateAutonomousDatabaseWalletResponse.to_json(oracledatabase.GenerateAutonomousDatabaseWalletResponse()) + req.return_value._content = ( + oracledatabase.GenerateAutonomousDatabaseWalletResponse.to_json( + oracledatabase.GenerateAutonomousDatabaseWalletResponse() 
+ ) + ) request = oracledatabase.GenerateAutonomousDatabaseWalletRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() - client.generate_autonomous_database_wallet(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.generate_autonomous_database_wallet( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_generate_autonomous_database_wallet_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest): +def test_generate_autonomous_database_wallet_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.GenerateAutonomousDatabaseWalletRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -6437,19 +8093,21 @@ def test_generate_autonomous_database_wallet_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autonomousDatabases/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/autonomousDatabases/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", type_=autonomous_database.GenerateType.ALL, is_regional=True, - password='password_value', + password="password_value", ) mock_args.update(sample_request) @@ -6457,9 +8115,11 @@ def test_generate_autonomous_database_wallet_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb(return_value) + return_value = oracledatabase.GenerateAutonomousDatabaseWalletResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.generate_autonomous_database_wallet(**mock_args) @@ -6468,10 +8128,16 @@ def test_generate_autonomous_database_wallet_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autonomousDatabases/*}:generateWallet" + % client.transport._host, + args[1], + ) -def test_generate_autonomous_database_wallet_rest_flattened_error(transport: str = 'rest'): +def test_generate_autonomous_database_wallet_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6482,24 +8148,26 @@ def test_generate_autonomous_database_wallet_rest_flattened_error(transport: str with pytest.raises(ValueError): client.generate_autonomous_database_wallet( oracledatabase.GenerateAutonomousDatabaseWalletRequest(), - name='name_value', + name="name_value", type_=autonomous_database.GenerateType.ALL, is_regional=True, - password='password_value', + password="password_value", ) def test_generate_autonomous_database_wallet_rest_error(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListAutonomousDbVersionsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDbVersionsRequest, + dict, + ], +) def test_list_autonomous_db_versions_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6507,14 +8175,14 @@ def test_list_autonomous_db_versions_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method 
and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDbVersionsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -6524,13 +8192,14 @@ def test_list_autonomous_db_versions_rest(request_type): return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_db_versions(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAutonomousDbVersionsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_autonomous_db_versions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6546,12 +8215,19 @@ def test_list_autonomous_db_versions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_autonomous_db_versions in client._transport._wrapped_methods + assert ( + client._transport.list_autonomous_db_versions + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_autonomous_db_versions] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_db_versions + ] = mock_rpc request = {} client.list_autonomous_db_versions(request) @@ -6566,57 +8242,67 @@ def test_list_autonomous_db_versions_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_autonomous_db_versions_rest_required_fields(request_type=oracledatabase.ListAutonomousDbVersionsRequest): +def test_list_autonomous_db_versions_rest_required_fields( + request_type=oracledatabase.ListAutonomousDbVersionsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_db_versions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDbVersionsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -6624,42 +8310,61 @@ def test_list_autonomous_db_versions_rest_required_fields(request_type=oracledat response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_db_versions(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_autonomous_db_versions_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_autonomous_db_versions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_autonomous_db_versions_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else 
transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_db_versions") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_db_versions") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "post_list_autonomous_db_versions" + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_db_versions" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListAutonomousDbVersionsRequest.pb(oracledatabase.ListAutonomousDbVersionsRequest()) + pb_message = oracledatabase.ListAutonomousDbVersionsRequest.pb( + oracledatabase.ListAutonomousDbVersionsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6670,34 +8375,48 @@ def test_list_autonomous_db_versions_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListAutonomousDbVersionsResponse.to_json(oracledatabase.ListAutonomousDbVersionsResponse()) + req.return_value._content = ( + oracledatabase.ListAutonomousDbVersionsResponse.to_json( + oracledatabase.ListAutonomousDbVersionsResponse() + ) + ) request = oracledatabase.ListAutonomousDbVersionsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = 
oracledatabase.ListAutonomousDbVersionsResponse() - client.list_autonomous_db_versions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_autonomous_db_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_autonomous_db_versions_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDbVersionsRequest): +def test_list_autonomous_db_versions_rest_bad_request( + transport: str = "rest", request_type=oracledatabase.ListAutonomousDbVersionsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -6713,16 +8432,16 @@ def test_list_autonomous_db_versions_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDbVersionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -6732,7 +8451,7 @@ def test_list_autonomous_db_versions_rest_flattened(): # Convert return value to protobuf type return_value = oracledatabase.ListAutonomousDbVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_autonomous_db_versions(**mock_args) @@ -6741,10 +8460,14 @@ def test_list_autonomous_db_versions_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDbVersions" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDbVersions" + % client.transport._host, + args[1], + ) -def test_list_autonomous_db_versions_rest_flattened_error(transport: str = 'rest'): +def test_list_autonomous_db_versions_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6755,20 +8478,20 @@ def test_list_autonomous_db_versions_rest_flattened_error(transport: str = 'rest with pytest.raises(ValueError): client.list_autonomous_db_versions( oracledatabase.ListAutonomousDbVersionsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_autonomous_db_versions_rest_pager(transport: str = 'rest'): +def test_list_autonomous_db_versions_rest_pager(transport: str = 
"rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListAutonomousDbVersionsResponse( @@ -6777,17 +8500,17 @@ def test_list_autonomous_db_versions_rest_pager(transport: str = 'rest'): autonomous_db_version.AutonomousDbVersion(), autonomous_db_version.AutonomousDbVersion(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListAutonomousDbVersionsResponse( autonomous_db_versions=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListAutonomousDbVersionsResponse( autonomous_db_versions=[ autonomous_db_version.AutonomousDbVersion(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListAutonomousDbVersionsResponse( autonomous_db_versions=[ @@ -6800,31 +8523,37 @@ def test_list_autonomous_db_versions_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListAutonomousDbVersionsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListAutonomousDbVersionsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": 
"projects/sample1/locations/sample2"} pager = client.list_autonomous_db_versions(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, autonomous_db_version.AutonomousDbVersion) - for i in results) + assert all( + isinstance(i, autonomous_db_version.AutonomousDbVersion) for i in results + ) pages = list(client.list_autonomous_db_versions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, + dict, + ], +) def test_list_autonomous_database_character_sets_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6832,30 +8561,33 @@ def test_list_autonomous_database_character_sets_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_database_character_sets(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAutonomousDatabaseCharacterSetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_autonomous_database_character_sets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6871,12 +8603,19 @@ def test_list_autonomous_database_character_sets_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_autonomous_database_character_sets in client._transport._wrapped_methods + assert ( + client._transport.list_autonomous_database_character_sets + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_autonomous_database_character_sets] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_database_character_sets + ] = mock_rpc request = {} client.list_autonomous_database_character_sets(request) @@ -6891,57 +8630,72 @@ def test_list_autonomous_database_character_sets_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_autonomous_database_character_sets_rest_required_fields(request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): +def test_list_autonomous_database_character_sets_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_character_sets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_character_sets._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_character_sets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_character_sets._get_unset_required_fields( + jsonified_request + ) # Check that path parameters and body 
parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -6949,42 +8703,68 @@ def test_list_autonomous_database_character_sets_rest_required_fields(request_ty response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(return_value) + return_value = ( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_database_character_sets(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_autonomous_database_character_sets_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.list_autonomous_database_character_sets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + unset_fields = ( + transport.list_autonomous_database_character_sets._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def 
test_list_autonomous_database_character_sets_rest_interceptors(null_interceptor): transport = transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_database_character_sets") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_character_sets") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_autonomous_database_character_sets", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "pre_list_autonomous_database_character_sets", + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb(oracledatabase.ListAutonomousDatabaseCharacterSetsRequest()) + pb_message = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest.pb( + oracledatabase.ListAutonomousDatabaseCharacterSetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6995,34 +8775,49 @@ def test_list_autonomous_database_character_sets_rest_interceptors(null_intercep req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(oracledatabase.ListAutonomousDatabaseCharacterSetsResponse()) + req.return_value._content = 
( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() + ) + ) request = oracledatabase.ListAutonomousDatabaseCharacterSetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() - client.list_autonomous_database_character_sets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_autonomous_database_character_sets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_autonomous_database_character_sets_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest): +def test_list_autonomous_database_character_sets_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListAutonomousDatabaseCharacterSetsRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -7038,16 +8833,16 @@ def test_list_autonomous_database_character_sets_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -7055,9 +8850,11 @@ def test_list_autonomous_database_character_sets_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_autonomous_database_character_sets(**mock_args) @@ -7066,10 +8863,16 @@ def test_list_autonomous_database_character_sets_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseCharacterSets" + % client.transport._host, + args[1], + ) -def test_list_autonomous_database_character_sets_rest_flattened_error(transport: str = 'rest'): +def test_list_autonomous_database_character_sets_rest_flattened_error( + transport: str = "rest", +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7080,20 +8883,20 @@ def test_list_autonomous_database_character_sets_rest_flattened_error(transport: with pytest.raises(ValueError): client.list_autonomous_database_character_sets( oracledatabase.ListAutonomousDatabaseCharacterSetsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_autonomous_database_character_sets_rest_pager(transport: str = 'rest'): +def test_list_autonomous_database_character_sets_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( @@ -7102,17 +8905,17 @@ def test_list_autonomous_database_character_sets_rest_pager(transport: str = 're autonomous_database_character_set.AutonomousDatabaseCharacterSet(), autonomous_database_character_set.AutonomousDatabaseCharacterSet(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( autonomous_database_character_sets=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( autonomous_database_character_sets=[ autonomous_database_character_set.AutonomousDatabaseCharacterSet(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListAutonomousDatabaseCharacterSetsResponse( autonomous_database_character_sets=[ @@ -7125,31 +8928,43 @@ def test_list_autonomous_database_character_sets_rest_pager(transport: str = 're response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListAutonomousDatabaseCharacterSetsResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_autonomous_database_character_sets(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, 
autonomous_database_character_set.AutonomousDatabaseCharacterSet) - for i in results) + assert all( + isinstance( + i, autonomous_database_character_set.AutonomousDatabaseCharacterSet + ) + for i in results + ) - pages = list(client.list_autonomous_database_character_sets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.list_autonomous_database_character_sets(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - oracledatabase.ListAutonomousDatabaseBackupsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + oracledatabase.ListAutonomousDatabaseBackupsRequest, + dict, + ], +) def test_list_autonomous_database_backups_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7157,30 +8972,33 @@ def test_list_autonomous_database_backups_rest(request_type): ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_database_backups(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAutonomousDatabaseBackupsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" + def test_list_autonomous_database_backups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7196,12 +9014,19 @@ def test_list_autonomous_database_backups_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_autonomous_database_backups in client._transport._wrapped_methods + assert ( + client._transport.list_autonomous_database_backups + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_autonomous_database_backups] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_autonomous_database_backups + ] = mock_rpc request = {} client.list_autonomous_database_backups(request) @@ -7216,57 +9041,68 @@ def test_list_autonomous_database_backups_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_autonomous_database_backups_rest_required_fields(request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest): +def test_list_autonomous_database_backups_rest_required_fields( + request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest, +): transport_class = transports.OracleDatabaseRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autonomous_database_backups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -7274,42 +9110,65 @@ def test_list_autonomous_database_backups_rest_required_fields(request_type=orac response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_autonomous_database_backups(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_autonomous_database_backups_rest_unset_required_fields(): - transport = transports.OracleDatabaseRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.OracleDatabaseRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.list_autonomous_database_backups._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + unset_fields = ( + transport.list_autonomous_database_backups._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_autonomous_database_backups_rest_interceptors(null_interceptor): transport = 
transports.OracleDatabaseRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.OracleDatabaseRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.OracleDatabaseRestInterceptor(), + ) client = OracleDatabaseClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "post_list_autonomous_database_backups") as post, \ - mock.patch.object(transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_backups") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OracleDatabaseRestInterceptor, + "post_list_autonomous_database_backups", + ) as post, mock.patch.object( + transports.OracleDatabaseRestInterceptor, "pre_list_autonomous_database_backups" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb(oracledatabase.ListAutonomousDatabaseBackupsRequest()) + pb_message = oracledatabase.ListAutonomousDatabaseBackupsRequest.pb( + oracledatabase.ListAutonomousDatabaseBackupsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7320,34 +9179,49 @@ def test_list_autonomous_database_backups_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(oracledatabase.ListAutonomousDatabaseBackupsResponse()) + req.return_value._content = ( + oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json( + oracledatabase.ListAutonomousDatabaseBackupsResponse() + ) + ) request = 
oracledatabase.ListAutonomousDatabaseBackupsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() - client.list_autonomous_database_backups(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_autonomous_database_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_list_autonomous_database_backups_rest_bad_request(transport: str = 'rest', request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest): +def test_list_autonomous_database_backups_rest_bad_request( + transport: str = "rest", + request_type=oracledatabase.ListAutonomousDatabaseBackupsRequest, +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -7363,16 +9237,16 @@ def test_list_autonomous_database_backups_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -7380,9 +9254,11 @@ def test_list_autonomous_database_backups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb(return_value) + return_value = oracledatabase.ListAutonomousDatabaseBackupsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_autonomous_database_backups(**mock_args) @@ -7391,10 +9267,14 @@ def test_list_autonomous_database_backups_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autonomousDatabaseBackups" + % client.transport._host, + args[1], + ) -def test_list_autonomous_database_backups_rest_flattened_error(transport: str = 'rest'): +def test_list_autonomous_database_backups_rest_flattened_error(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7405,20 +9285,20 @@ def test_list_autonomous_database_backups_rest_flattened_error(transport: str = with pytest.raises(ValueError): client.list_autonomous_database_backups( oracledatabase.ListAutonomousDatabaseBackupsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_autonomous_database_backups_rest_pager(transport: str = 'rest'): +def test_list_autonomous_database_backups_rest_pager(transport: str = "rest"): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( oracledatabase.ListAutonomousDatabaseBackupsResponse( @@ -7427,17 +9307,17 @@ def test_list_autonomous_database_backups_rest_pager(transport: str = 'rest'): autonomous_db_backup.AutonomousDatabaseBackup(), autonomous_db_backup.AutonomousDatabaseBackup(), ], - next_page_token='abc', + next_page_token="abc", ), oracledatabase.ListAutonomousDatabaseBackupsResponse( autonomous_database_backups=[], - next_page_token='def', + next_page_token="def", ), oracledatabase.ListAutonomousDatabaseBackupsResponse( autonomous_database_backups=[ autonomous_db_backup.AutonomousDatabaseBackup(), ], - next_page_token='ghi', + next_page_token="ghi", ), oracledatabase.ListAutonomousDatabaseBackupsResponse( autonomous_database_backups=[ @@ -7450,24 +9330,31 @@ def test_list_autonomous_database_backups_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(x) for x in response) + response = tuple( + oracledatabase.ListAutonomousDatabaseBackupsResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_autonomous_database_backups(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, autonomous_db_backup.AutonomousDatabaseBackup) - for i in results) + assert all( + isinstance(i, 
autonomous_db_backup.AutonomousDatabaseBackup) + for i in results + ) - pages = list(client.list_autonomous_database_backups(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.list_autonomous_database_backups(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -7509,8 +9396,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = OracleDatabaseClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -7533,19 +9419,26 @@ def test_transport_instance(): assert client.transport is transport -@pytest.mark.parametrize("transport_class", [ - transports.OracleDatabaseRestTransport, -]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.OracleDatabaseRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_transport_kind(transport_name): transport = OracleDatabaseClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), @@ -7558,13 +9451,15 @@ def test_oracle_database_base_transport_error(): with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.OracleDatabaseTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_oracle_database_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport.__init__') as Transport: + with mock.patch( + "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.OracleDatabaseTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -7573,34 +9468,34 @@ def test_oracle_database_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_cloud_exadata_infrastructures', - 'get_cloud_exadata_infrastructure', - 'create_cloud_exadata_infrastructure', - 'delete_cloud_exadata_infrastructure', - 'list_cloud_vm_clusters', - 'get_cloud_vm_cluster', - 'create_cloud_vm_cluster', - 'delete_cloud_vm_cluster', - 'list_entitlements', - 'list_db_servers', - 'list_db_nodes', - 'list_gi_versions', - 'list_db_system_shapes', - 'list_autonomous_databases', - 'get_autonomous_database', - 'create_autonomous_database', - 'delete_autonomous_database', - 'restore_autonomous_database', - 'generate_autonomous_database_wallet', - 'list_autonomous_db_versions', - 'list_autonomous_database_character_sets', - 'list_autonomous_database_backups', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "list_cloud_exadata_infrastructures", + "get_cloud_exadata_infrastructure", + "create_cloud_exadata_infrastructure", + "delete_cloud_exadata_infrastructure", + "list_cloud_vm_clusters", + "get_cloud_vm_cluster", + "create_cloud_vm_cluster", + "delete_cloud_vm_cluster", + "list_entitlements", + "list_db_servers", + "list_db_nodes", + "list_gi_versions", + "list_db_system_shapes", + "list_autonomous_databases", + "get_autonomous_database", + "create_autonomous_database", + "delete_autonomous_database", + "restore_autonomous_database", + "generate_autonomous_database_wallet", + "list_autonomous_db_versions", + "list_autonomous_database_character_sets", + "list_autonomous_database_backups", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -7616,7 +9511,7 @@ def test_oracle_database_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -7625,25 +9520,30 @@ def test_oracle_database_base_transport(): 
def test_oracle_database_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages') as Transport: + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.OracleDatabaseTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_oracle_database_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.oracledatabase_v1.services.oracle_database.transports.OracleDatabaseTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.OracleDatabaseTransport() @@ -7652,24 +9552,23 @@ def test_oracle_database_base_transport_with_adc(): def test_oracle_database_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) OracleDatabaseClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) def test_oracle_database_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.OracleDatabaseRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.OracleDatabaseRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @@ -7677,7 +9576,7 @@ def test_oracle_database_http_transport_client_cert_source_for_mtls(): def 
test_oracle_database_rest_lro_client(): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) transport = client.transport @@ -7691,39 +9590,54 @@ def test_oracle_database_rest_lro_client(): assert transport.operations_client is transport.operations_client -@pytest.mark.parametrize("transport_name", [ - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_oracle_database_host_no_port(transport_name): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='oracledatabase.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + api_endpoint="oracledatabase.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'oracledatabase.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://oracledatabase.googleapis.com' + "oracledatabase.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_oracle_database_host_with_port(transport_name): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='oracledatabase.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="oracledatabase.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'oracledatabase.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://oracledatabase.googleapis.com:8000' + "oracledatabase.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://oracledatabase.googleapis.com:8000" ) 
-@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_oracle_database_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -7802,12 +9716,19 @@ def test_oracle_database_client_transport_session_collision(transport_name): session2 = client2.transport.list_autonomous_database_backups._session assert session1 != session2 + def test_autonomous_database_path(): project = "squid" location = "clam" autonomous_database = "whelk" - expected = "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format(project=project, location=location, autonomous_database=autonomous_database, ) - actual = OracleDatabaseClient.autonomous_database_path(project, location, autonomous_database) + expected = "projects/{project}/locations/{location}/autonomousDatabases/{autonomous_database}".format( + project=project, + location=location, + autonomous_database=autonomous_database, + ) + actual = OracleDatabaseClient.autonomous_database_path( + project, location, autonomous_database + ) assert expected == actual @@ -7823,12 +9744,19 @@ def test_parse_autonomous_database_path(): actual = OracleDatabaseClient.parse_autonomous_database_path(path) assert expected == actual + def test_autonomous_database_backup_path(): project = "cuttlefish" location = "mussel" autonomous_database_backup = "winkle" - expected = "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format(project=project, location=location, autonomous_database_backup=autonomous_database_backup, ) - actual = OracleDatabaseClient.autonomous_database_backup_path(project, location, autonomous_database_backup) + expected = "projects/{project}/locations/{location}/autonomousDatabaseBackups/{autonomous_database_backup}".format( + project=project, + location=location, + 
autonomous_database_backup=autonomous_database_backup, + ) + actual = OracleDatabaseClient.autonomous_database_backup_path( + project, location, autonomous_database_backup + ) assert expected == actual @@ -7844,12 +9772,19 @@ def test_parse_autonomous_database_backup_path(): actual = OracleDatabaseClient.parse_autonomous_database_backup_path(path) assert expected == actual + def test_autonomous_database_character_set_path(): project = "squid" location = "clam" autonomous_database_character_set = "whelk" - expected = "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format(project=project, location=location, autonomous_database_character_set=autonomous_database_character_set, ) - actual = OracleDatabaseClient.autonomous_database_character_set_path(project, location, autonomous_database_character_set) + expected = "projects/{project}/locations/{location}/autonomousDatabaseCharacterSets/{autonomous_database_character_set}".format( + project=project, + location=location, + autonomous_database_character_set=autonomous_database_character_set, + ) + actual = OracleDatabaseClient.autonomous_database_character_set_path( + project, location, autonomous_database_character_set + ) assert expected == actual @@ -7865,12 +9800,19 @@ def test_parse_autonomous_database_character_set_path(): actual = OracleDatabaseClient.parse_autonomous_database_character_set_path(path) assert expected == actual + def test_autonomous_db_version_path(): project = "cuttlefish" location = "mussel" autonomous_db_version = "winkle" - expected = "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format(project=project, location=location, autonomous_db_version=autonomous_db_version, ) - actual = OracleDatabaseClient.autonomous_db_version_path(project, location, autonomous_db_version) + expected = "projects/{project}/locations/{location}/autonomousDbVersions/{autonomous_db_version}".format( + project=project, + 
location=location, + autonomous_db_version=autonomous_db_version, + ) + actual = OracleDatabaseClient.autonomous_db_version_path( + project, location, autonomous_db_version + ) assert expected == actual @@ -7886,12 +9828,19 @@ def test_parse_autonomous_db_version_path(): actual = OracleDatabaseClient.parse_autonomous_db_version_path(path) assert expected == actual + def test_cloud_exadata_infrastructure_path(): project = "squid" location = "clam" cloud_exadata_infrastructure = "whelk" - expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, ) - actual = OracleDatabaseClient.cloud_exadata_infrastructure_path(project, location, cloud_exadata_infrastructure) + expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}".format( + project=project, + location=location, + cloud_exadata_infrastructure=cloud_exadata_infrastructure, + ) + actual = OracleDatabaseClient.cloud_exadata_infrastructure_path( + project, location, cloud_exadata_infrastructure + ) assert expected == actual @@ -7907,12 +9856,19 @@ def test_parse_cloud_exadata_infrastructure_path(): actual = OracleDatabaseClient.parse_cloud_exadata_infrastructure_path(path) assert expected == actual + def test_cloud_vm_cluster_path(): project = "cuttlefish" location = "mussel" cloud_vm_cluster = "winkle" - expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, ) - actual = OracleDatabaseClient.cloud_vm_cluster_path(project, location, cloud_vm_cluster) + expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + ) + actual = OracleDatabaseClient.cloud_vm_cluster_path( + project, location, 
cloud_vm_cluster + ) assert expected == actual @@ -7928,13 +9884,21 @@ def test_parse_cloud_vm_cluster_path(): actual = OracleDatabaseClient.parse_cloud_vm_cluster_path(path) assert expected == actual + def test_db_node_path(): project = "squid" location = "clam" cloud_vm_cluster = "whelk" db_node = "octopus" - expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format(project=project, location=location, cloud_vm_cluster=cloud_vm_cluster, db_node=db_node, ) - actual = OracleDatabaseClient.db_node_path(project, location, cloud_vm_cluster, db_node) + expected = "projects/{project}/locations/{location}/cloudVmClusters/{cloud_vm_cluster}/dbNodes/{db_node}".format( + project=project, + location=location, + cloud_vm_cluster=cloud_vm_cluster, + db_node=db_node, + ) + actual = OracleDatabaseClient.db_node_path( + project, location, cloud_vm_cluster, db_node + ) assert expected == actual @@ -7951,13 +9915,21 @@ def test_parse_db_node_path(): actual = OracleDatabaseClient.parse_db_node_path(path) assert expected == actual + def test_db_server_path(): project = "winkle" location = "nautilus" cloud_exadata_infrastructure = "scallop" db_server = "abalone" - expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format(project=project, location=location, cloud_exadata_infrastructure=cloud_exadata_infrastructure, db_server=db_server, ) - actual = OracleDatabaseClient.db_server_path(project, location, cloud_exadata_infrastructure, db_server) + expected = "projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}/dbServers/{db_server}".format( + project=project, + location=location, + cloud_exadata_infrastructure=cloud_exadata_infrastructure, + db_server=db_server, + ) + actual = OracleDatabaseClient.db_server_path( + project, location, cloud_exadata_infrastructure, db_server + ) assert expected == actual @@ 
-7974,12 +9946,19 @@ def test_parse_db_server_path(): actual = OracleDatabaseClient.parse_db_server_path(path) assert expected == actual + def test_db_system_shape_path(): project = "oyster" location = "nudibranch" db_system_shape = "cuttlefish" - expected = "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format(project=project, location=location, db_system_shape=db_system_shape, ) - actual = OracleDatabaseClient.db_system_shape_path(project, location, db_system_shape) + expected = "projects/{project}/locations/{location}/dbSystemShapes/{db_system_shape}".format( + project=project, + location=location, + db_system_shape=db_system_shape, + ) + actual = OracleDatabaseClient.db_system_shape_path( + project, location, db_system_shape + ) assert expected == actual @@ -7995,11 +9974,18 @@ def test_parse_db_system_shape_path(): actual = OracleDatabaseClient.parse_db_system_shape_path(path) assert expected == actual + def test_entitlement_path(): project = "scallop" location = "abalone" entitlement = "squid" - expected = "projects/{project}/locations/{location}/entitlements/{entitlement}".format(project=project, location=location, entitlement=entitlement, ) + expected = ( + "projects/{project}/locations/{location}/entitlements/{entitlement}".format( + project=project, + location=location, + entitlement=entitlement, + ) + ) actual = OracleDatabaseClient.entitlement_path(project, location, entitlement) assert expected == actual @@ -8016,11 +10002,16 @@ def test_parse_entitlement_path(): actual = OracleDatabaseClient.parse_entitlement_path(path) assert expected == actual + def test_gi_version_path(): project = "oyster" location = "nudibranch" gi_version = "cuttlefish" - expected = "projects/{project}/locations/{location}/giVersions/{gi_version}".format(project=project, location=location, gi_version=gi_version, ) + expected = "projects/{project}/locations/{location}/giVersions/{gi_version}".format( + project=project, + location=location, + 
gi_version=gi_version, + ) actual = OracleDatabaseClient.gi_version_path(project, location, gi_version) assert expected == actual @@ -8037,10 +10028,14 @@ def test_parse_gi_version_path(): actual = OracleDatabaseClient.parse_gi_version_path(path) assert expected == actual + def test_network_path(): project = "scallop" network = "abalone" - expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) actual = OracleDatabaseClient.network_path(project, network) assert expected == actual @@ -8056,9 +10051,12 @@ def test_parse_network_path(): actual = OracleDatabaseClient.parse_network_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = OracleDatabaseClient.common_billing_account_path(billing_account) assert expected == actual @@ -8073,9 +10071,12 @@ def test_parse_common_billing_account_path(): actual = OracleDatabaseClient.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = OracleDatabaseClient.common_folder_path(folder) assert expected == actual @@ -8090,9 +10091,12 @@ def test_parse_common_folder_path(): actual = OracleDatabaseClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = OracleDatabaseClient.common_organization_path(organization) 
assert expected == actual @@ -8107,9 +10111,12 @@ def test_parse_common_organization_path(): actual = OracleDatabaseClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = OracleDatabaseClient.common_project_path(project) assert expected == actual @@ -8124,10 +10131,14 @@ def test_parse_common_project_path(): actual = OracleDatabaseClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "scallop" location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = OracleDatabaseClient.common_location_path(project, location) assert expected == actual @@ -8147,14 +10158,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.OracleDatabaseTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.OracleDatabaseTransport, "_prep_wrapped_messages" + ) as prep: client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.OracleDatabaseTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.OracleDatabaseTransport, "_prep_wrapped_messages" + ) as prep: transport_class = OracleDatabaseClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -8163,17 +10178,23 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -def test_get_location_rest_bad_request(transport: str = 'rest', 
request_type=locations_pb2.GetLocationRequest): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8181,19 +10202,23 @@ def test_get_location_rest_bad_request(transport: str = 'rest', request_type=loc req.return_value = response_value client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.Location() @@ -8202,7 +10227,7 @@ def test_get_location_rest(request_type): response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_location(request) @@ -8210,17 +10235,22 @@ def test_get_location_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8228,19 +10258,23 @@ def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=l req.return_value = response_value client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.ListLocationsResponse() @@ -8249,7 +10283,7 @@ def test_list_locations_rest(request_type): response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_locations(request) @@ -8257,17 +10291,24 @@ def test_list_locations_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8275,28 +10316,32 @@ def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type req.return_value = response_value client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '{}' + json_return_value = "{}" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.cancel_operation(request) @@ -8304,17 +10349,24 @@ def test_cancel_operation_rest(request_type): # Establish that the response is the type that we expect. assert response is None -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8322,28 +10374,32 @@ def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type req.return_value = response_value client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '{}' + json_return_value = "{}" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_operation(request) @@ -8351,17 +10407,24 @@ def test_delete_operation_rest(request_type): # Establish that the response is the type that we expect. 
assert response is None -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8369,19 +10432,23 @@ def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=op req.return_value = response_value client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation() @@ -8390,7 +10457,7 @@ def test_get_operation_rest(request_type): response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_operation(request) @@ -8398,17 +10465,24 @@ def test_get_operation_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 400 @@ -8416,19 +10490,23 @@ def test_list_operations_rest_bad_request(transport: str = 'rest', request_type= req.return_value = response_value client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = OracleDatabaseClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.ListOperationsResponse() @@ -8437,7 +10515,7 @@ def test_list_operations_rest(request_type): response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_operations(request) @@ -8453,22 +10531,23 @@ def test_transport_close(): for transport, close_name in transports.items(): client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() + def test_client_ctx(): transports = [ - 'rest', + "rest", ] for transport in transports: client = OracleDatabaseClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -8477,9 +10556,13 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (OracleDatabaseClient, transports.OracleDatabaseRestTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (OracleDatabaseClient, transports.OracleDatabaseRestTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -8494,7 +10577,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, From 9b2d04c6f75d981c22c1c7adfd37c492955e4fcd Mon Sep 17 00:00:00 2001 From: ohmayr Date: Mon, 23 Sep 2024 16:40:12 +0000 Subject: [PATCH 7/9] updates to repo metadata --- packages/google-cloud-oracledatabase/.flake8 | 2 +- .../.repo-metadata.json | 8 ++++---- .../google-cloud-oracledatabase/MANIFEST.in | 2 +- .../google-cloud-oracledatabase/README.rst | 20 +++++++++---------- .../google-cloud-oracledatabase/docs/conf.py | 2 +- .../docs/summary_overview.md | 6 +++--- .../google-cloud-oracledatabase/noxfile.py | 2 +- .../scripts/decrypt-secrets.sh | 2 +- 8 files changed, 22 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-oracledatabase/.flake8 b/packages/google-cloud-oracledatabase/.flake8 index 87f6e408c47d..32986c79287a 100644 --- a/packages/google-cloud-oracledatabase/.flake8 +++ b/packages/google-cloud-oracledatabase/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the 
"License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-oracledatabase/.repo-metadata.json b/packages/google-cloud-oracledatabase/.repo-metadata.json index 1dce499bb5ac..6d1b2164a92f 100644 --- a/packages/google-cloud-oracledatabase/.repo-metadata.json +++ b/packages/google-cloud-oracledatabase/.repo-metadata.json @@ -1,10 +1,10 @@ { "name": "google-cloud-oracledatabase", - "name_pretty": "", - "api_description": "", - "product_documentation": "", + "name_pretty": "Oracle Database@Google Cloud API", + "api_description": "The Oracle Database@Google Cloud API provides a set of APIs to manage Oracle database services, such as Exadata and Autonomous Databases.", + "product_documentation": "https://cloud.google.com/oracle/database/docs", "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1492565", "release_level": "preview", "language": "python", "library_type": "GAPIC_AUTO", diff --git a/packages/google-cloud-oracledatabase/MANIFEST.in b/packages/google-cloud-oracledatabase/MANIFEST.in index e0a66705318e..d6814cd60037 100644 --- a/packages/google-cloud-oracledatabase/MANIFEST.in +++ b/packages/google-cloud-oracledatabase/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-oracledatabase/README.rst b/packages/google-cloud-oracledatabase/README.rst index 3d0109a492fc..c050103f1ecd 100644 --- a/packages/google-cloud-oracledatabase/README.rst +++ b/packages/google-cloud-oracledatabase/README.rst @@ -1,9 +1,9 @@ -Python Client for -================== +Python Client for Oracle Database@Google Cloud API +================================================== |preview| |pypi| |versions| -``_: +`Oracle Database@Google Cloud API`_: The Oracle Database@Google Cloud API provides a set of APIs to manage Oracle database services, such as Exadata and Autonomous Databases. - `Client Library Documentation`_ - `Product Documentation`_ @@ -14,9 +14,9 @@ Python Client for :target: https://pypi.org/project/google-cloud-oracledatabase/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-oracledatabase.svg :target: https://pypi.org/project/google-cloud-oracledatabase/ -.. _: +.. _Oracle Database@Google Cloud API: https://cloud.google.com/oracle/database/docs .. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_overview -.. _Product Documentation: +.. _Product Documentation: https://cloud.google.com/oracle/database/docs Quick Start ----------- @@ -25,12 +25,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ -3. `Enable the .`_ +3. `Enable the Oracle Database@Google Cloud API.`_ 4. `Setup Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the .: +.. _Enable the Oracle Database@Google Cloud API.: https://cloud.google.com/oracle/database/docs .. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation @@ -97,12 +97,12 @@ Windows Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for +- Read the `Client Library Documentation`_ for Oracle Database@Google Cloud API to see other available methods on the client. -- Read the ` Product documentation`_ to learn +- Read the `Oracle Database@Google Cloud API Product documentation`_ to learn more about the product and see How-to Guides. - View this `README`_ to see the full list of Cloud APIs that we cover. -.. _ Product documentation: +.. _Oracle Database@Google Cloud API Product documentation: https://cloud.google.com/oracle/database/docs .. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-oracledatabase/docs/conf.py b/packages/google-cloud-oracledatabase/docs/conf.py index d3d4e9d5a5c4..a4b21f79d825 100644 --- a/packages/google-cloud-oracledatabase/docs/conf.py +++ b/packages/google-cloud-oracledatabase/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-oracledatabase/docs/summary_overview.md b/packages/google-cloud-oracledatabase/docs/summary_overview.md index 02a3fcd382b8..b1a49458a4c0 100644 --- a/packages/google-cloud-oracledatabase/docs/summary_overview.md +++ b/packages/google-cloud-oracledatabase/docs/summary_overview.md @@ -5,14 +5,14 @@ reverted. Instead, if you want to place additional content, create an pick up on the content and merge the content. ]: # -# API +# Oracle Database@Google Cloud API API -Overview of the APIs available for API. +Overview of the APIs available for Oracle Database@Google Cloud API API. ## All entries Classes, methods and properties & attributes for - API. 
+Oracle Database@Google Cloud API API. [classes](https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest/summary_class.html) diff --git a/packages/google-cloud-oracledatabase/noxfile.py b/packages/google-cloud-oracledatabase/noxfile.py index 67b7265f7586..aeee7851401a 100644 --- a/packages/google-cloud-oracledatabase/noxfile.py +++ b/packages/google-cloud-oracledatabase/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh index 0018b421ddf8..120b0ddc4364 100755 --- a/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-oracledatabase/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From ad5c1f96176930709f36b33a738068998c8f7f8b Mon Sep 17 00:00:00 2001 From: ohmayr Date: Mon, 23 Sep 2024 18:12:34 +0000 Subject: [PATCH 8/9] cleanup: summary overview repeated API suffix --- packages/google-cloud-oracledatabase/docs/summary_overview.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-oracledatabase/docs/summary_overview.md b/packages/google-cloud-oracledatabase/docs/summary_overview.md index b1a49458a4c0..326e6e99fa26 100644 --- a/packages/google-cloud-oracledatabase/docs/summary_overview.md +++ b/packages/google-cloud-oracledatabase/docs/summary_overview.md @@ -5,9 +5,9 @@ reverted. Instead, if you want to place additional content, create an pick up on the content and merge the content. 
]: # -# Oracle Database@Google Cloud API API +# Oracle Database@Google Cloud API -Overview of the APIs available for Oracle Database@Google Cloud API API. +Overview of the APIs available for Oracle Database@Google Cloud API. ## All entries From e9f131be3cbd8c41403564cbce95a1cea3c15e59 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Mon, 23 Sep 2024 20:21:25 +0000 Subject: [PATCH 9/9] fix: docs issue for oracledatabase --- .../oracledatabase_v1/types/exadata_infra.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py index c57ed47f33ef..b023bb9f1f34 100644 --- a/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py +++ b/packages/google-cloud-oracledatabase/google/cloud/oracledatabase_v1/types/exadata_infra.py @@ -363,13 +363,12 @@ class MaintenanceWindow(proto.Message): Optional. The window of hours during the day when maintenance should be performed. The window is a 4 hour slot. Valid values are: - - 0 - represents time slot 0:00 - 3:59 UTC - 4 - represents time slot 4:00 - 7:59 UTC - 8 - represents time slot 8:00 - 11:59 UTC - 12 - represents time slot 12:00 - 15:59 UTC - 16 - represents time slot 16:00 - 19:59 UTC 20 - - represents time slot 20:00 - 23:59 UTC + 0 - represents time slot 0:00 - 3:59 UTC + 4 - represents time slot 4:00 - 7:59 UTC + 8 - represents time slot 8:00 - 11:59 UTC + 12 - represents time slot 12:00 - 15:59 UTC + 16 - represents time slot 16:00 - 19:59 UTC + 20 - represents time slot 20:00 - 23:59 UTC lead_time_week (int): Optional. Lead time window allows user to set a lead time to prepare for a down time. The lead @@ -378,7 +377,7 @@ class MaintenanceWindow(proto.Message): patching_mode (google.cloud.oracledatabase_v1.types.MaintenanceWindow.PatchingMode): Optional. 
Cloud CloudExadataInfrastructure node patching method, either "ROLLING" - or "NONROLLING". Default value is ROLLING. + or "NONROLLING". Default value is ROLLING. custom_action_timeout_mins (int): Optional. Determines the amount of time the system will wait before the start of each