From 0f3d38fc620620026ab4287d0e64fe63d1ca936a Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 16 Jan 2018 15:21:07 -0800 Subject: [PATCH 1/5] Import logging_v2 generated by artman Command log: ``` set DEST ~/workspace/google-cloud-python/logging set GOOGLEAPIS_REV fca7f0706769f8ece08121a87394b7c6e6d87687 cd /tmp git clone https://github.com/googleapis/googleapis.git cd googleapis git reset --hard $GOOGLEAPIS_REV artman --config google/logging/artman_logging.yaml generate python_gapic set SRC ./artman-genfiles/python/logging-v2/ set IMPORT_PKG logging_v2 cp -r $SRC/docs $DEST cp -r $SRC/google/cloud/$IMPORT_PKG $DEST/google/cloud/ mkdir -p $DEST/tests/unit/gapic cp -r $SRC/tests/unit/gapic $DEST/tests/unit mkdir -p $DEST/tests/system/gapic cp -r $SRC/tests/system/gapic $DEST/tests/system ``` --- logging/docs/conf.py | 310 ++++ logging/docs/gapic/v2/api.rst | 6 + logging/docs/gapic/v2/types.rst | 5 + logging/docs/index.rst | 99 ++ logging/google/cloud/logging_v2/__init__.py | 45 + .../google/cloud/logging_v2/gapic/__init__.py | 0 .../gapic/config_service_v2_client.py | 912 ++++++++++ .../gapic/config_service_v2_client_config.py | 82 + .../google/cloud/logging_v2/gapic/enums.py | 159 ++ .../gapic/logging_service_v2_client.py | 615 +++++++ .../gapic/logging_service_v2_client_config.py | 62 + .../gapic/metrics_service_v2_client.py | 465 +++++ .../gapic/metrics_service_v2_client_config.py | 48 + .../google/cloud/logging_v2/proto/__init__.py | 0 .../cloud/logging_v2/proto/log_entry_pb2.py | 508 ++++++ .../logging_v2/proto/log_entry_pb2_grpc.py | 3 + .../logging_v2/proto/logging_config_pb2.py | 1575 +++++++++++++++++ .../proto/logging_config_pb2_grpc.py | 211 +++ .../logging_v2/proto/logging_metrics_pb2.py | 895 ++++++++++ .../proto/logging_metrics_pb2_grpc.py | 115 ++ .../cloud/logging_v2/proto/logging_pb2.py | 1146 ++++++++++++ .../logging_v2/proto/logging_pb2_grpc.py | 128 ++ logging/google/cloud/logging_v2/types.py | 65 + 
.../v2/test_system_logging_service_v2_v2.py | 34 + .../v2/test_config_service_v2_client_v2.py | 460 +++++ .../v2/test_logging_service_v2_client_v2.py | 238 +++ .../v2/test_metrics_service_v2_client_v2.py | 256 +++ 27 files changed, 8442 insertions(+) create mode 100644 logging/docs/conf.py create mode 100644 logging/docs/gapic/v2/api.rst create mode 100644 logging/docs/gapic/v2/types.rst create mode 100644 logging/docs/index.rst create mode 100644 logging/google/cloud/logging_v2/__init__.py create mode 100644 logging/google/cloud/logging_v2/gapic/__init__.py create mode 100644 logging/google/cloud/logging_v2/gapic/config_service_v2_client.py create mode 100644 logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py create mode 100644 logging/google/cloud/logging_v2/gapic/enums.py create mode 100644 logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py create mode 100644 logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py create mode 100644 logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py create mode 100644 logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py create mode 100644 logging/google/cloud/logging_v2/proto/__init__.py create mode 100644 logging/google/cloud/logging_v2/proto/log_entry_pb2.py create mode 100644 logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py create mode 100644 logging/google/cloud/logging_v2/proto/logging_config_pb2.py create mode 100644 logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py create mode 100644 logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py create mode 100644 logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py create mode 100644 logging/google/cloud/logging_v2/proto/logging_pb2.py create mode 100644 logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py create mode 100644 logging/google/cloud/logging_v2/types.py create mode 100644 
logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py create mode 100644 logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py create mode 100644 logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py create mode 100644 logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py diff --git a/logging/docs/conf.py b/logging/docs/conf.py new file mode 100644 index 000000000000..adaa0afcea45 --- /dev/null +++ b/logging/docs/conf.py @@ -0,0 +1,310 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-logging documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +__version__ = '0.91.4' + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', +] + +# autodoc/autosummary flags +autoclass_content = 'both' +autodoc_default_flags = ['members'] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. 
+# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'google-cloud-logging' +copyright = u'2017, Google' +author = u'Google APIs' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = '.'.join(release.split('.')[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. 
+pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = 'google-cloud-logging-doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', + + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'google-cloud-logging.tex', + u'google-cloud-logging Documentation', author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'google-cloud-logging', + u'google-cloud-logging Documentation', [author], 1)] + +# If true, show URL addresses after external links. +#man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'google-cloud-logging', u'google-cloud-logging Documentation', + author, 'google-cloud-logging', + 'GAPIC library for the {metadata.shortName} v2 service', 'APIs'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + 'python': ('http://python.readthedocs.org/en/latest/', None), + 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), +} + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/logging/docs/gapic/v2/api.rst b/logging/docs/gapic/v2/api.rst new file mode 100644 index 000000000000..2dc6bf6fcc6b --- /dev/null +++ b/logging/docs/gapic/v2/api.rst @@ -0,0 +1,6 @@ +Client for Stackdriver Logging API +================================== + +.. automodule:: google.cloud.logging_v2 + :members: + :inherited-members: \ No newline at end of file diff --git a/logging/docs/gapic/v2/types.rst b/logging/docs/gapic/v2/types.rst new file mode 100644 index 000000000000..5521d4f9bc12 --- /dev/null +++ b/logging/docs/gapic/v2/types.rst @@ -0,0 +1,5 @@ +Types for Stackdriver Logging API Client +======================================== + +.. 
automodule:: google.cloud.logging_v2.types + :members: \ No newline at end of file diff --git a/logging/docs/index.rst b/logging/docs/index.rst new file mode 100644 index 000000000000..5bb60aaf57ee --- /dev/null +++ b/logging/docs/index.rst @@ -0,0 +1,99 @@ +Python Client for Stackdriver Logging API (`Beta`_) +=================================================== + +`Stackdriver Logging API`_: Writes log entries and manages your Stackdriver Logging configuration. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. _Beta: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst +.. _Stackdriver Logging API: https://cloud.google.com/logging +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html +.. _Product Documentation: https://cloud.google.com/logging + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Stackdriver Logging API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Stackdriver Logging API.: https://cloud.google.com/logging +.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. 
_`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-logging + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-logging + +Preview +~~~~~~~ + +LoggingServiceV2Client +^^^^^^^^^^^^^^^^^^^^^^ + +.. code:: py + + from google.cloud import logging_v2 + + client = logging_v2.LoggingServiceV2Client() + + entries = [] + + response = client.write_log_entries(entries) + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Stackdriver Logging API + API to see other available methods on the client. +- Read the `Stackdriver Logging API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _Stackdriver Logging API Product documentation: https://cloud.google.com/logging +.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst + +Api Reference +------------- +.. toctree:: + :maxdepth: 2 + + gapic/v2/api + gapic/v2/types \ No newline at end of file diff --git a/logging/google/cloud/logging_v2/__init__.py b/logging/google/cloud/logging_v2/__init__.py new file mode 100644 index 000000000000..536e5dd24330 --- /dev/null +++ b/logging/google/cloud/logging_v2/__init__.py @@ -0,0 +1,45 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.logging_v2 import types +from google.cloud.logging_v2.gapic import config_service_v2_client +from google.cloud.logging_v2.gapic import enums +from google.cloud.logging_v2.gapic import logging_service_v2_client +from google.cloud.logging_v2.gapic import metrics_service_v2_client + + +class LoggingServiceV2Client(logging_service_v2_client.LoggingServiceV2Client): + __doc__ = logging_service_v2_client.LoggingServiceV2Client.__doc__ + enums = enums + + +class ConfigServiceV2Client(config_service_v2_client.ConfigServiceV2Client): + __doc__ = config_service_v2_client.ConfigServiceV2Client.__doc__ + enums = enums + + +class MetricsServiceV2Client(metrics_service_v2_client.MetricsServiceV2Client): + __doc__ = metrics_service_v2_client.MetricsServiceV2Client.__doc__ + enums = enums + + +__all__ = ( + 'enums', + 'types', + 'LoggingServiceV2Client', + 'ConfigServiceV2Client', + 'MetricsServiceV2Client', +) diff --git a/logging/google/cloud/logging_v2/gapic/__init__.py b/logging/google/cloud/logging_v2/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py new file mode 100644 index 000000000000..13e3693d403b --- /dev/null +++ b/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -0,0 +1,912 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Accesses the google.logging.v2 ConfigServiceV2 API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template + +from google.api import monitored_resource_pb2 +from google.cloud.logging_v2.gapic import config_service_v2_client_config +from google.cloud.logging_v2.gapic import enums +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.protobuf import field_mask_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-logging', ).version + + +class ConfigServiceV2Client(object): + """ + Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + + SERVICE_ADDRESS = 'logging.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + # The name of the interface for this client. 
This is the key used to find + # method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.logging.v2.ConfigServiceV2' + + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, + ) + + @classmethod + def sink_path(cls, project, sink): + """Return a fully-qualified sink string.""" + return google.api_core.path_template.expand( + 'projects/{project}/sinks/{sink}', + project=project, + sink=sink, + ) + + @classmethod + def exclusion_path(cls, project, exclusion): + """Return a fully-qualified exclusion string.""" + return google.api_core.path_template.expand( + 'projects/{project}/exclusions/{exclusion}', + project=project, + exclusion=exclusion, + ) + + def __init__(self, + channel=None, + credentials=None, + client_config=config_service_v2_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). 
+ if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__), ) + + # Create the channel. + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES, + ) + + # Create the gRPC stubs. + self.config_service_v2_stub = ( + logging_config_pb2.ConfigServiceV2Stub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) + + # Write the "inner API call" methods to the class. + # These are wrapped versions of the gRPC stub methods, with retry and + # timeout configuration applied, called by the public methods on + # this class. 
+ self._list_sinks = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.ListSinks, + default_retry=method_configs['ListSinks'].retry, + default_timeout=method_configs['ListSinks'].timeout, + client_info=client_info, + ) + self._get_sink = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.GetSink, + default_retry=method_configs['GetSink'].retry, + default_timeout=method_configs['GetSink'].timeout, + client_info=client_info, + ) + self._create_sink = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.CreateSink, + default_retry=method_configs['CreateSink'].retry, + default_timeout=method_configs['CreateSink'].timeout, + client_info=client_info, + ) + self._update_sink = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.UpdateSink, + default_retry=method_configs['UpdateSink'].retry, + default_timeout=method_configs['UpdateSink'].timeout, + client_info=client_info, + ) + self._delete_sink = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.DeleteSink, + default_retry=method_configs['DeleteSink'].retry, + default_timeout=method_configs['DeleteSink'].timeout, + client_info=client_info, + ) + self._list_exclusions = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.ListExclusions, + default_retry=method_configs['ListExclusions'].retry, + default_timeout=method_configs['ListExclusions'].timeout, + client_info=client_info, + ) + self._get_exclusion = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.GetExclusion, + default_retry=method_configs['GetExclusion'].retry, + default_timeout=method_configs['GetExclusion'].timeout, + client_info=client_info, + ) + self._create_exclusion = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.CreateExclusion, + default_retry=method_configs['CreateExclusion'].retry, + default_timeout=method_configs['CreateExclusion'].timeout, + 
client_info=client_info, + ) + self._update_exclusion = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.UpdateExclusion, + default_retry=method_configs['UpdateExclusion'].retry, + default_timeout=method_configs['UpdateExclusion'].timeout, + client_info=client_info, + ) + self._delete_exclusion = google.api_core.gapic_v1.method.wrap_method( + self.config_service_v2_stub.DeleteExclusion, + default_retry=method_configs['DeleteExclusion'].retry, + default_timeout=method_configs['DeleteExclusion'].timeout, + client_info=client_info, + ) + + # Service calls + def list_sinks(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists sinks. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_sinks(parent): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_sinks(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The parent resource whose sinks are to be listed: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.LogSink` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.ListSinksRequest( + parent=parent, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_sinks, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='sinks', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def get_sink(self, + sink_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets a sink. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') + >>> + >>> response = client.get_sink(sink_name) + + Args: + sink_name (str): Required. 
The resource name of the sink: + + :: + + \"projects/[PROJECT_ID]/sinks/[SINK_ID]\" + \"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]\" + \"folders/[FOLDER_ID]/sinks/[SINK_ID]\" + + Example: ``\"projects/my-project-id/sinks/my-sink-id\"``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogSink` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.GetSinkRequest(sink_name=sink_name, ) + return self._get_sink( + request, retry=retry, timeout=timeout, metadata=metadata) + + def create_sink(self, + parent, + sink, + unique_writer_identity=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + ``writer_identity`` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. 
+ + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> sink = {} + >>> + >>> response = client.create_sink(parent, sink) + + Args: + parent (str): Required. The resource in which to create the sink: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + + Examples: ``\"projects/my-logging-project\"``, ``\"organizations/123456789\"``. + sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The new sink, whose ``name`` parameter is a sink identifier that + is not already in use. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogSink` + unique_writer_identity (bool): Optional. Determines the kind of IAM identity returned as ``writer_identity`` + in the new sink. If this value is omitted or set to false, and if the + sink's parent is a project, then the value returned as ``writer_identity`` is + the same group or service account used by Stackdriver Logging before the + addition of writer identities to this API. The sink's destination must be + in the same project as the sink itself. + + If this field is set to true, or if the sink is owned by a non-project + resource such as an organization, then the value of ``writer_identity`` will + be a unique service account used only for exports from the new sink. For + more information, see ``writer_identity`` in ``LogSink``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogSink` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.CreateSinkRequest( + parent=parent, + sink=sink, + unique_writer_identity=unique_writer_identity, + ) + return self._create_sink( + request, retry=retry, timeout=timeout, metadata=metadata) + + def update_sink(self, + sink_name, + sink, + unique_writer_identity=None, + update_mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Updates a sink. This method replaces the following fields in the existing + sink with values from the new sink: ``destination``, and ``filter``. + The updated sink might also have a new ``writer_identity``; see the + ``unique_writer_identity`` field. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') + >>> sink = {} + >>> + >>> response = client.update_sink(sink_name, sink) + + Args: + sink_name (str): Required. The full resource name of the sink to update, including the + parent resource and the sink identifier: + + :: + + \"projects/[PROJECT_ID]/sinks/[SINK_ID]\" + \"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]\" + \"folders/[FOLDER_ID]/sinks/[SINK_ID]\" + + Example: ``\"projects/my-project-id/sinks/my-sink-id\"``. + sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. 
The updated sink, whose name is the same identifier that appears + as part of ``sink_name``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogSink` + unique_writer_identity (bool): Optional. See + `sinks.create `_ + for a description of this field. When updating a sink, the effect of this + field on the value of ``writer_identity`` in the updated sink depends on both + the old and new values of this field: + + + If the old and new values of this field are both false or both true, + :: + + then there is no change to the sink's `writer_identity`. + + If the old value is false and the new value is true, then + :: + + `writer_identity` is changed to a unique service account. + + It is an error if the old value is true and the new value is + :: + + set to false or defaulted to false. + update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Optional. Field mask that specifies the fields in ``sink`` that need + an update. A sink field will be overwritten if, and only if, it is + in the update mask. ``name`` and output only fields cannot be updated. + + An empty updateMask is temporarily treated as using the following mask + for backwards compatibility purposes: + destination,filter,includeChildren + At some point in the future, behavior will be removed and specifying an + empty updateMask will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + + Example: ``updateMask=filter``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogSink` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.UpdateSinkRequest( + sink_name=sink_name, + sink=sink, + unique_writer_identity=unique_writer_identity, + update_mask=update_mask, + ) + return self._update_sink( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_sink(self, + sink_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes a sink. If the sink has a unique ``writer_identity``, then that + service account is also deleted. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> sink_name = client.sink_path('[PROJECT]', '[SINK]') + >>> + >>> client.delete_sink(sink_name) + + Args: + sink_name (str): Required. The full resource name of the sink to delete, including the + parent resource and the sink identifier: + + :: + + \"projects/[PROJECT_ID]/sinks/[SINK_ID]\" + \"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]\" + \"folders/[FOLDER_ID]/sinks/[SINK_ID]\" + + Example: ``\"projects/my-project-id/sinks/my-sink-id\"``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name, ) + self._delete_sink( + request, retry=retry, timeout=timeout, metadata=metadata) + + def list_exclusions(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists all the exclusions in a parent resource. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_exclusions(parent): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_exclusions(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The parent resource whose exclusions are to be listed. + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. 
If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.LogExclusion` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.ListExclusionsRequest( + parent=parent, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_exclusions, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='exclusions', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def get_exclusion(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets the description of an exclusion. 
+ + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + >>> + >>> response = client.get_exclusion(name) + + Args: + name (str): Required. The resource name of an existing exclusion: + + :: + + \"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]\" + \"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]\" + \"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]\" + + Example: ``\"projects/my-project-id/exclusions/my-exclusion-id\"``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.GetExclusionRequest(name=name, ) + return self._get_exclusion( + request, retry=retry, timeout=timeout, metadata=metadata) + + def create_exclusion(self, + parent, + exclusion, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. 
+ + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> exclusion = {} + >>> + >>> response = client.create_exclusion(parent, exclusion) + + Args: + parent (str): Required. The parent resource in which to create the exclusion: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + + Examples: ``\"projects/my-logging-project\"``, ``\"organizations/123456789\"``. + exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. The new exclusion, whose ``name`` parameter is an exclusion name + that is not already used in the parent resource. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogExclusion` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.CreateExclusionRequest( + parent=parent, + exclusion=exclusion, + ) + return self._create_exclusion( + request, retry=retry, timeout=timeout, metadata=metadata) + + def update_exclusion(self, + name, + exclusion, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Changes one or more properties of an existing exclusion. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + >>> exclusion = {} + >>> update_mask = {} + >>> + >>> response = client.update_exclusion(name, exclusion, update_mask) + + Args: + name (str): Required. The resource name of the exclusion to update: + + :: + + \"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]\" + \"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]\" + \"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]\" + + Example: ``\"projects/my-project-id/exclusions/my-exclusion-id\"``. + exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]): Required. New values for the existing exclusion. Only the fields specified + in ``update_mask`` are relevant. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogExclusion` + update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Required. A nonempty list of fields to change in the existing exclusion. + New values for the fields are taken from the corresponding fields in the + ``LogExclusion`` included in this request. Fields not mentioned in + ``update_mask`` are not changed and are ignored in the request. 
+ + For example, to change the filter and description of an exclusion, + specify an ``update_mask`` of ``\"filter,description\"``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogExclusion` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.UpdateExclusionRequest( + name=name, + exclusion=exclusion, + update_mask=update_mask, + ) + return self._update_exclusion( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_exclusion(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes an exclusion. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.ConfigServiceV2Client() + >>> + >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]') + >>> + >>> client.delete_exclusion(name) + + Args: + name (str): Required. 
The resource name of an existing exclusion to delete: + + :: + + \"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]\" + \"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]\" + \"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]\" + + Example: ``\"projects/my-project-id/exclusions/my-exclusion-id\"``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_config_pb2.DeleteExclusionRequest(name=name, ) + self._delete_exclusion( + request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py b/logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py new file mode 100644 index 000000000000..bc8363c6f3f2 --- /dev/null +++ b/logging/google/cloud/logging_v2/gapic/config_service_v2_client_config.py @@ -0,0 +1,82 @@ +config = { + "interfaces": { + "google.logging.v2.ConfigServiceV2": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 30000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 90000 + }, + "write_sink": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 30000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 120000 + } + }, + "methods": { + "ListSinks": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetSink": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateSink": { + "timeout_millis": 120000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "UpdateSink": { + "timeout_millis": 120000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteSink": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListExclusions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + 
"retry_params_name": "default" + }, + "GetExclusion": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateExclusion": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "UpdateExclusion": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteExclusion": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/logging/google/cloud/logging_v2/gapic/enums.py b/logging/google/cloud/logging_v2/gapic/enums.py new file mode 100644 index 000000000000..47212e5bc29f --- /dev/null +++ b/logging/google/cloud/logging_v2/gapic/enums.py @@ -0,0 +1,159 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class LogSeverity(object): + """ + The severity of the event described in a log entry, expressed as one of the + standard severity levels listed below. For your reference, the levels are + assigned the listed numeric values. The effect of using numeric values other + than those listed is undefined. + + You can filter for log entries by severity. 
For example, the following + filter expression will match log entries with severities ``INFO``, ``NOTICE``, + and ``WARNING``: + + :: + + severity > DEBUG AND severity <= WARNING + + If you are writing log entries, you should map other severity encodings to + one of these standard levels. For example, you might map all of Java's FINE, + FINER, and FINEST levels to ``LogSeverity.DEBUG``. You can preserve the + original severity level in the log entry payload if you wish. + + Attributes: + DEFAULT (int): (0) The log entry has no assigned severity level. + DEBUG (int): (100) Debug or trace information. + INFO (int): (200) Routine information, such as ongoing status or performance. + NOTICE (int): (300) Normal but significant events, such as start up, shut down, or + a configuration change. + WARNING (int): (400) Warning events might cause problems. + ERROR (int): (500) Error events are likely to cause problems. + CRITICAL (int): (600) Critical events cause more severe problems or outages. + ALERT (int): (700) A person must take an action immediately. + EMERGENCY (int): (800) One or more systems are unusable. + """ + DEFAULT = 0 + DEBUG = 100 + INFO = 200 + NOTICE = 300 + WARNING = 400 + ERROR = 500 + CRITICAL = 600 + ALERT = 700 + EMERGENCY = 800 + + +class NullValue(object): + """ + ``NullValue`` is a singleton enumeration to represent the null value for the + ``Value`` type union. + + The JSON representation for ``NullValue`` is JSON ``null``. + + Attributes: + NULL_VALUE (int): Null value. + """ + NULL_VALUE = 0 + + +class LabelDescriptor(object): + class ValueType(object): + """ + Value types that can be used as label values. + + Attributes: + STRING (int): A variable-length string. This is the default. + BOOL (int): Boolean; true or false. + INT64 (int): A 64-bit signed integer. + """ + STRING = 0 + BOOL = 1 + INT64 = 2 + + +class LogSink(object): + class VersionFormat(object): + """ + Available log entry formats. 
Log entries can be written to Stackdriver + Logging in either format and can be exported in either format. + Version 2 is the preferred format. + + Attributes: + VERSION_FORMAT_UNSPECIFIED (int): An unspecified format version that will default to V2. + V2 (int): ``LogEntry`` version 2 format. + V1 (int): ``LogEntry`` version 1 format. + """ + VERSION_FORMAT_UNSPECIFIED = 0 + V2 = 1 + V1 = 2 + + +class MetricDescriptor(object): + class MetricKind(object): + """ + The kind of measurement. It describes how the data is reported. + + Attributes: + METRIC_KIND_UNSPECIFIED (int): Do not use this default value. + GAUGE (int): An instantaneous measurement of a value. + DELTA (int): The change in a value during a time interval. + CUMULATIVE (int): A value accumulated over a time interval. Cumulative + measurements in a time series should have the same start time + and increasing end times, until an event resets the cumulative + value to zero and sets a new start time for the following + points. + """ + METRIC_KIND_UNSPECIFIED = 0 + GAUGE = 1 + DELTA = 2 + CUMULATIVE = 3 + + class ValueType(object): + """ + The value type of a metric. + + Attributes: + VALUE_TYPE_UNSPECIFIED (int): Do not use this default value. + BOOL (int): The value is a boolean. + This value type can be used only if the metric kind is ``GAUGE``. + INT64 (int): The value is a signed 64-bit integer. + DOUBLE (int): The value is a double precision floating point number. + STRING (int): The value is a text string. + This value type can be used only if the metric kind is ``GAUGE``. + DISTRIBUTION (int): The value is a ````Distribution````. + MONEY (int): The value is money. + """ + VALUE_TYPE_UNSPECIFIED = 0 + BOOL = 1 + INT64 = 2 + DOUBLE = 3 + STRING = 4 + DISTRIBUTION = 5 + MONEY = 6 + + +class LogMetric(object): + class ApiVersion(object): + """ + Stackdriver Logging API version. + + Attributes: + V2 (int): Stackdriver Logging API v2. + V1 (int): Stackdriver Logging API v1. 
+ """ + V2 = 0 + V1 = 1 diff --git a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py new file mode 100644 index 000000000000..43d5de0d240f --- /dev/null +++ b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -0,0 +1,615 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Accesses the google.logging.v2 LoggingServiceV2 API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template + +from google.api import monitored_resource_pb2 +from google.cloud.logging_v2.gapic import enums +from google.cloud.logging_v2.gapic import logging_service_v2_client_config +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-logging', ).version + + +class LoggingServiceV2Client(object): + """Service for ingesting and querying logs.""" + + SERVICE_ADDRESS = 'logging.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 
'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + # The name of the interface for this client. This is the key used to find + # method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.logging.v2.LoggingServiceV2' + + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, + ) + + @classmethod + def log_path(cls, project, log): + """Return a fully-qualified log string.""" + return google.api_core.path_template.expand( + 'projects/{project}/logs/{log}', + project=project, + log=log, + ) + + def __init__(self, + channel=None, + credentials=None, + client_config=logging_service_v2_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). 
+ if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__), ) + + # Create the channel. + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES, + ) + + # Create the gRPC stubs. + self.logging_service_v2_stub = ( + logging_pb2.LoggingServiceV2Stub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) + + # Write the "inner API call" methods to the class. + # These are wrapped versions of the gRPC stub methods, with retry and + # timeout configuration applied, called by the public methods on + # this class. 
+ self._delete_log = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.DeleteLog, + default_retry=method_configs['DeleteLog'].retry, + default_timeout=method_configs['DeleteLog'].timeout, + client_info=client_info, + ) + self._write_log_entries = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.WriteLogEntries, + default_retry=method_configs['WriteLogEntries'].retry, + default_timeout=method_configs['WriteLogEntries'].timeout, + client_info=client_info, + ) + self._list_log_entries = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.ListLogEntries, + default_retry=method_configs['ListLogEntries'].retry, + default_timeout=method_configs['ListLogEntries'].timeout, + client_info=client_info, + ) + self._list_monitored_resource_descriptors = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.ListMonitoredResourceDescriptors, + default_retry=method_configs[ + 'ListMonitoredResourceDescriptors'].retry, + default_timeout=method_configs['ListMonitoredResourceDescriptors'] + .timeout, + client_info=client_info, + ) + self._list_logs = google.api_core.gapic_v1.method.wrap_method( + self.logging_service_v2_stub.ListLogs, + default_retry=method_configs['ListLogs'].retry, + default_timeout=method_configs['ListLogs'].timeout, + client_info=client_info, + ) + + # Service calls + def delete_log(self, + log_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes all the log entries in a log. + The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.LoggingServiceV2Client() + >>> + >>> log_name = client.log_path('[PROJECT]', '[LOG]') + >>> + >>> client.delete_log(log_name) + + Args: + log_name (str): Required. 
The resource name of the log to delete: + + :: + + \"projects/[PROJECT_ID]/logs/[LOG_ID]\" + \"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]\" + \"folders/[FOLDER_ID]/logs/[LOG_ID]\" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``\"projects/my-project-id/logs/syslog\"``, + ``\"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\"``. + For more information about log names, see + ``LogEntry``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_pb2.DeleteLogRequest(log_name=log_name, ) + self._delete_log( + request, retry=retry, timeout=timeout, metadata=metadata) + + def write_log_entries(self, + entries, + log_name=None, + resource=None, + labels=None, + partial_success=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + ## Log entry resources + + Writes log entries to Stackdriver Logging. This API method is the + only way to send log entries to Stackdriver Logging. This method + is used, directly or indirectly, by the Stackdriver Logging agent + (fluentd) and all logging libraries configured to use Stackdriver + Logging. 
+ + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.LoggingServiceV2Client() + >>> + >>> entries = [] + >>> + >>> response = client.write_log_entries(entries) + + Args: + entries (list[Union[dict, ~google.cloud.logging_v2.types.LogEntry]]): Required. The log entries to send to Stackdriver Logging. The order of log + entries in this list does not matter. Values supplied in this method's + ``log_name``, ``resource``, and ``labels`` fields are copied into those log + entries in this list that do not include values for their corresponding + fields. For more information, see the ``LogEntry`` type. + + If the ``timestamp`` or ``insert_id`` fields are missing in log entries, then + this method supplies the current time or a unique identifier, respectively. + The supplied values are chosen so that, among the log entries that did not + supply their own values, the entries earlier in the list will sort before + the entries later in the list. See the ``entries.list`` method. + + Log entries with timestamps that are more than the + `logs retention period `_ in the past or more than + 24 hours in the future might be discarded. Discarding does not return + an error. + + To improve throughput and to avoid exceeding the + `quota limit `_ for calls to ``entries.write``, + you should try to include several log entries in this list, + rather than calling this method for each individual log entry. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogEntry` + log_name (str): Optional. A default log resource name that is assigned to all log entries + in ``entries`` that do not specify a value for ``log_name``: + + :: + + \"projects/[PROJECT_ID]/logs/[LOG_ID]\" + \"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]\" + \"folders/[FOLDER_ID]/logs/[LOG_ID]\" + + ``[LOG_ID]`` must be URL-encoded. 
For example, + ``\"projects/my-project-id/logs/syslog\"`` or + ``\"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity\"``. + For more information about log names, see + ``LogEntry``. + resource (Union[dict, ~google.cloud.logging_v2.types.MonitoredResource]): Optional. A default monitored resource object that is assigned to all log + entries in ``entries`` that do not specify a value for ``resource``. Example: + + :: + + { \"type\": \"gce_instance\", + \"labels\": { + \"zone\": \"us-central1-a\", \"instance_id\": \"00000000000000000000\" }} + + See ``LogEntry``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.MonitoredResource` + labels (dict[str -> str]): Optional. Default labels that are added to the ``labels`` field of all log + entries in ``entries``. If a log entry already has a label with the same key + as a label in this parameter, then the log entry's label is not changed. + See ``LogEntry``. + partial_success (bool): Optional. Whether valid entries should be written even if some other + entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. If any + entry is not written, then the response status is the error associated + with one of the failed entries and the response includes error details + keyed by the entries' zero-based index in the ``entries.write`` method. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.WriteLogEntriesResponse` instance. 
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_pb2.WriteLogEntriesRequest( + entries=entries, + log_name=log_name, + resource=resource, + labels=labels, + partial_success=partial_success, + ) + return self._write_log_entries( + request, retry=retry, timeout=timeout, metadata=metadata) + + def list_log_entries(self, + resource_names, + project_ids=None, + filter_=None, + order_by=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + `Exporting Logs `_. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.LoggingServiceV2Client() + >>> + >>> resource_names = [] + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_log_entries(resource_names): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_log_entries(resource_names, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + resource_names (list[str]): Required. Names of one or more parent resources from which to + retrieve log entries: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + + Projects listed in the ``project_ids`` field are added to this list. + project_ids (list[str]): Deprecated. Use ``resource_names`` instead. One or more project identifiers + or project numbers from which to retrieve log entries. 
Example: + ``\"my-project-1A\"``. If present, these project identifiers are converted to + resource name format and added to the list of resources in + ``resource_names``. + filter_ (str): Optional. A filter that chooses which log entries to return. See [Advanced + Logs Filters](/logging/docs/view/advanced_filters). Only log entries that + match the filter are returned. An empty filter matches all log entries in + the resources listed in ``resource_names``. Referencing a parent resource + that is not listed in ``resource_names`` will cause the filter to return no + results. + The maximum length of the filter is 20000 characters. + order_by (str): Optional. How the results should be sorted. Presently, the only permitted + values are ``\"timestamp asc\"`` (default) and ``\"timestamp desc\"``. The first + option returns entries in order of increasing values of + ``LogEntry.timestamp`` (oldest first), and the second option returns entries + in order of decreasing timestamps (newest first). Entries with equal + timestamps are returned in order of their ``insert_id`` values. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.LogEntry` instances. 
+ This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_pb2.ListLogEntriesRequest( + resource_names=resource_names, + project_ids=project_ids, + filter=filter_, + order_by=order_by, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_log_entries, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='entries', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def list_monitored_resource_descriptors( + self, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists the descriptors for monitored resource types used by Stackdriver + Logging. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.LoggingServiceV2Client() + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_monitored_resource_descriptors(): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_monitored_resource_descriptors(options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. 
If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.MonitoredResourceDescriptor` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_pb2.ListMonitoredResourceDescriptorsRequest( + page_size=page_size, ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_monitored_resource_descriptors, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='resource_descriptors', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def list_logs(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. 
+ + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.LoggingServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_logs(parent): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_logs(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The resource name that owns the logs: + + :: + + \"projects/[PROJECT_ID]\" + \"organizations/[ORGANIZATION_ID]\" + \"billingAccounts/[BILLING_ACCOUNT_ID]\" + \"folders/[FOLDER_ID]\" + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`str` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_pb2.ListLogsRequest( + parent=parent, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_logs, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='log_names', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator diff --git a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py new file mode 100644 index 000000000000..d70c2ef6a65a --- /dev/null +++ b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client_config.py @@ -0,0 +1,62 @@ +config = { + "interfaces": { + "google.logging.v2.LoggingServiceV2": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 90000 + }, + "list": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 2000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 10000, + "total_timeout_millis": 20000 + } + }, + "methods": { + "DeleteLog": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "WriteLogEntries": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + "bundling": { + "element_count_threshold": 1000, + "request_byte_threshold": 1048576, + "delay_threshold_millis": 50 + } + }, + "ListLogEntries": { + "timeout_millis": 10000, + "retry_codes_name": 
"idempotent", + "retry_params_name": "list" + }, + "ListMonitoredResourceDescriptors": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListLogs": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py new file mode 100644 index 000000000000..823588e27cfd --- /dev/null +++ b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -0,0 +1,465 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Accesses the google.logging.v2 MetricsServiceV2 API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template + +from google.api import monitored_resource_pb2 +from google.cloud.logging_v2.gapic import enums +from google.cloud.logging_v2.gapic import metrics_service_v2_client_config +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.protobuf import field_mask_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-logging', ).version + + +class MetricsServiceV2Client(object): + """Service for configuring logs-based metrics.""" + + SERVICE_ADDRESS = 'logging.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + # The name of the interface for this client. This is the key used to find + # method configuration in the client_config dictionary. 
+ _INTERFACE_NAME = 'google.logging.v2.MetricsServiceV2' + + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, + ) + + @classmethod + def metric_path(cls, project, metric): + """Return a fully-qualified metric string.""" + return google.api_core.path_template.expand( + 'projects/{project}/metrics/{metric}', + project=project, + metric=metric, + ) + + def __init__(self, + channel=None, + credentials=None, + client_config=metrics_service_v2_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__), ) + + # Create the channel. 
+ if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES, + ) + + # Create the gRPC stubs. + self.metrics_service_v2_stub = ( + logging_metrics_pb2.MetricsServiceV2Stub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) + + # Write the "inner API call" methods to the class. + # These are wrapped versions of the gRPC stub methods, with retry and + # timeout configuration applied, called by the public methods on + # this class. + self._list_log_metrics = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.ListLogMetrics, + default_retry=method_configs['ListLogMetrics'].retry, + default_timeout=method_configs['ListLogMetrics'].timeout, + client_info=client_info, + ) + self._get_log_metric = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.GetLogMetric, + default_retry=method_configs['GetLogMetric'].retry, + default_timeout=method_configs['GetLogMetric'].timeout, + client_info=client_info, + ) + self._create_log_metric = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.CreateLogMetric, + default_retry=method_configs['CreateLogMetric'].retry, + default_timeout=method_configs['CreateLogMetric'].timeout, + client_info=client_info, + ) + self._update_log_metric = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.UpdateLogMetric, + default_retry=method_configs['UpdateLogMetric'].retry, + 
default_timeout=method_configs['UpdateLogMetric'].timeout, + client_info=client_info, + ) + self._delete_log_metric = google.api_core.gapic_v1.method.wrap_method( + self.metrics_service_v2_stub.DeleteLogMetric, + default_retry=method_configs['DeleteLogMetric'].retry, + default_timeout=method_configs['DeleteLogMetric'].timeout, + client_info=client_info, + ) + + # Service calls + def list_log_metrics(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists logs-based metrics. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_log_metrics(parent): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_log_metrics(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The name of the project containing the metrics: + + :: + + \"projects/[PROJECT_ID]\" + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. 
+ + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.logging_v2.types.LogMetric` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.ListLogMetricsRequest( + parent=parent, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_log_metrics, + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='metrics', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def get_log_metric(self, + metric_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets a logs-based metric. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') + >>> + >>> response = client.get_log_metric(metric_name) + + Args: + metric_name (str): The resource name of the desired metric: + + :: + + \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogMetric` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.GetLogMetricRequest( + metric_name=metric_name, ) + return self._get_log_metric( + request, retry=retry, timeout=timeout, metadata=metadata) + + def create_log_metric(self, + parent, + metric, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a logs-based metric. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> parent = client.project_path('[PROJECT]') + >>> metric = {} + >>> + >>> response = client.create_log_metric(parent, metric) + + Args: + parent (str): The resource name of the project in which to create the metric: + + :: + + \"projects/[PROJECT_ID]\" + + The new metric must be provided in the request. + metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The new logs-based metric, which must not have an identifier that + already exists. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogMetric` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogMetric` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.CreateLogMetricRequest( + parent=parent, + metric=metric, + ) + return self._create_log_metric( + request, retry=retry, timeout=timeout, metadata=metadata) + + def update_log_metric(self, + metric_name, + metric, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates or updates a logs-based metric. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') + >>> metric = {} + >>> + >>> response = client.update_log_metric(metric_name, metric) + + Args: + metric_name (str): The resource name of the metric to update: + + :: + + \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" + + The updated metric must be provided in the request and it's + ``name`` field must be the same as ``[METRIC_ID]`` If the metric + does not exist in ``[PROJECT_ID]``, then a new metric is created. + metric (Union[dict, ~google.cloud.logging_v2.types.LogMetric]): The updated metric. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.logging_v2.types.LogMetric` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.logging_v2.types.LogMetric` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.UpdateLogMetricRequest( + metric_name=metric_name, + metric=metric, + ) + return self._update_log_metric( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_log_metric(self, + metric_name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes a logs-based metric. + + Example: + >>> from google.cloud import logging_v2 + >>> + >>> client = logging_v2.MetricsServiceV2Client() + >>> + >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') + >>> + >>> client.delete_log_metric(metric_name) + + Args: + metric_name (str): The resource name of the metric to delete: + + :: + + \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. 
+ google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = logging_metrics_pb2.DeleteLogMetricRequest( + metric_name=metric_name, ) + self._delete_log_metric( + request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py new file mode 100644 index 000000000000..9ff717dd0213 --- /dev/null +++ b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client_config.py @@ -0,0 +1,48 @@ +config = { + "interfaces": { + "google.logging.v2.MetricsServiceV2": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.2, + "max_retry_delay_millis": 1000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.5, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 90000 + } + }, + "methods": { + "ListLogMetrics": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetLogMetric": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateLogMetric": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "UpdateLogMetric": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteLogMetric": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/logging/google/cloud/logging_v2/proto/__init__.py b/logging/google/cloud/logging_v2/proto/__init__.py new file mode 100644 
index 000000000000..e69de29bb2d1 diff --git a/logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/logging/google/cloud/logging_v2/proto/log_entry_pb2.py new file mode 100644 index 000000000000..88c8f6954fac --- /dev/null +++ b/logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -0,0 +1,508 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/logging_v2/proto/log_entry.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2 +from google.logging.type import http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2 +from google.logging.type import log_severity_pb2 as google_dot_logging_dot_type_dot_log__severity__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/logging_v2/proto/log_entry.proto', + package='google.logging.v2', + syntax='proto3', + 
serialized_pb=_b('\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xba\x05\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload\"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08\"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 
\x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR,google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR,google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_LOGENTRY_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.logging.v2.LogEntry.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.logging.v2.LogEntry.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.logging.v2.LogEntry.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=948, + serialized_end=993, +) + +_LOGENTRY = _descriptor.Descriptor( + name='LogEntry', + full_name='google.logging.v2.LogEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='log_name', full_name='google.logging.v2.LogEntry.log_name', index=0, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='resource', full_name='google.logging.v2.LogEntry.resource', index=1, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='proto_payload', full_name='google.logging.v2.LogEntry.proto_payload', index=2, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='text_payload', full_name='google.logging.v2.LogEntry.text_payload', index=3, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='json_payload', full_name='google.logging.v2.LogEntry.json_payload', index=4, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp', full_name='google.logging.v2.LogEntry.timestamp', index=5, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='receive_timestamp', full_name='google.logging.v2.LogEntry.receive_timestamp', index=6, + number=24, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='severity', full_name='google.logging.v2.LogEntry.severity', index=7, + number=10, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='insert_id', full_name='google.logging.v2.LogEntry.insert_id', index=8, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='http_request', full_name='google.logging.v2.LogEntry.http_request', index=9, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.logging.v2.LogEntry.labels', index=10, + number=11, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='operation', full_name='google.logging.v2.LogEntry.operation', index=11, + number=15, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='trace', 
full_name='google.logging.v2.LogEntry.trace', index=12, + number=22, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='span_id', full_name='google.logging.v2.LogEntry.span_id', index=13, + number=27, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='source_location', full_name='google.logging.v2.LogEntry.source_location', index=14, + number=23, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_LOGENTRY_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='payload', full_name='google.logging.v2.LogEntry.payload', + index=0, containing_type=None, fields=[]), + ], + serialized_start=306, + serialized_end=1004, +) + + +_LOGENTRYOPERATION = _descriptor.Descriptor( + name='LogEntryOperation', + full_name='google.logging.v2.LogEntryOperation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='google.logging.v2.LogEntryOperation.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='producer', full_name='google.logging.v2.LogEntryOperation.producer', index=1, + number=2, type=9, 
cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='first', full_name='google.logging.v2.LogEntryOperation.first', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last', full_name='google.logging.v2.LogEntryOperation.last', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1006, + serialized_end=1084, +) + + +_LOGENTRYSOURCELOCATION = _descriptor.Descriptor( + name='LogEntrySourceLocation', + full_name='google.logging.v2.LogEntrySourceLocation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='file', full_name='google.logging.v2.LogEntrySourceLocation.file', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='line', full_name='google.logging.v2.LogEntrySourceLocation.line', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='function', 
full_name='google.logging.v2.LogEntrySourceLocation.function', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1086, + serialized_end=1156, +) + +_LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY +_LOGENTRY.fields_by_name['resource'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE +_LOGENTRY.fields_by_name['proto_payload'].message_type = google_dot_protobuf_dot_any__pb2._ANY +_LOGENTRY.fields_by_name['json_payload'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT +_LOGENTRY.fields_by_name['timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGENTRY.fields_by_name['receive_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGENTRY.fields_by_name['severity'].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY +_LOGENTRY.fields_by_name['http_request'].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST +_LOGENTRY.fields_by_name['labels'].message_type = _LOGENTRY_LABELSENTRY +_LOGENTRY.fields_by_name['operation'].message_type = _LOGENTRYOPERATION +_LOGENTRY.fields_by_name['source_location'].message_type = _LOGENTRYSOURCELOCATION +_LOGENTRY.oneofs_by_name['payload'].fields.append( + _LOGENTRY.fields_by_name['proto_payload']) +_LOGENTRY.fields_by_name['proto_payload'].containing_oneof = _LOGENTRY.oneofs_by_name['payload'] +_LOGENTRY.oneofs_by_name['payload'].fields.append( + _LOGENTRY.fields_by_name['text_payload']) +_LOGENTRY.fields_by_name['text_payload'].containing_oneof = _LOGENTRY.oneofs_by_name['payload'] +_LOGENTRY.oneofs_by_name['payload'].fields.append( + 
_LOGENTRY.fields_by_name['json_payload']) +_LOGENTRY.fields_by_name['json_payload'].containing_oneof = _LOGENTRY.oneofs_by_name['payload'] +DESCRIPTOR.message_types_by_name['LogEntry'] = _LOGENTRY +DESCRIPTOR.message_types_by_name['LogEntryOperation'] = _LOGENTRYOPERATION +DESCRIPTOR.message_types_by_name['LogEntrySourceLocation'] = _LOGENTRYSOURCELOCATION + +LogEntry = _reflection.GeneratedProtocolMessageType('LogEntry', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _LOGENTRY_LABELSENTRY, + __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry.LabelsEntry) + )) + , + DESCRIPTOR = _LOGENTRY, + __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' + , + __doc__ = """An individual entry in a log. + + + Attributes: + log_name: + Required. The resource name of the log to which this log entry + belongs: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" A project number may + optionally be used in place of PROJECT\_ID. The project number + is translated to its corresponding PROJECT\_ID internally and + the ``log_name`` field will contain PROJECT\_ID in queries and + exports. ``[LOG_ID]`` must be URL-encoded within + ``log_name``. Example: ``"organizations/1234567890/logs/cloudr + esourcemanager.googleapis.com%2Factivity"``. ``[LOG_ID]`` must + be less than 512 characters long and can only include the + following characters: upper and lower case alphanumeric + characters, forward-slash, underscore, hyphen, and period. + For backward compatibility, if ``log_name`` begins with a + forward-slash, such as ``/projects/...``, then the log entry + is ingested as usual but the forward-slash is removed. 
Listing + the log entry will not show the leading slash and filtering + for a log name with a leading slash will never return any + results. + resource: + Required. The monitored resource associated with this log + entry. Example: a log entry that reports a database error + would be associated with the monitored resource designating + the particular database that reported the error. + payload: + Optional. The log entry payload, which can be one of multiple + types. + proto_payload: + The log entry payload, represented as a protocol buffer. Some + Google Cloud Platform services use this field for their log + entry payloads. + text_payload: + The log entry payload, represented as a Unicode string + (UTF-8). + json_payload: + The log entry payload, represented as a structure that is + expressed as a JSON object. + timestamp: + Optional. The time the event described by the log entry + occurred. This time is used to compute the log entry's age and + to enforce the logs retention period. If this field is omitted + in a new log entry, then Stackdriver Logging assigns it the + current time. Incoming log entries should have timestamps + that are no more than the `logs retention period + `__ in the past, and no more than 24 + hours in the future. See the ``entries.write`` API method for + more information. + receive_timestamp: + Output only. The time the log entry was received by + Stackdriver Logging. + severity: + Optional. The severity of the log entry. The default value is + ``LogSeverity.DEFAULT``. + insert_id: + Optional. A unique identifier for the log entry. If you + provide a value, then Stackdriver Logging considers other log + entries in the same project, with the same ``timestamp``, and + with the same ``insert_id`` to be duplicates which can be + removed. If omitted in new log entries, then Stackdriver + Logging assigns its own unique identifier. The ``insert_id`` + is also used to order log entries that have the same + ``timestamp`` value. 
+ http_request: + Optional. Information about the HTTP request associated with + this log entry, if applicable. + labels: + Optional. A set of user-defined (key, value) data that + provides additional information about the log entry. + operation: + Optional. Information about an operation associated with the + log entry, if applicable. + trace: + Optional. Resource name of the trace associated with the log + entry, if any. If it contains a relative resource name, the + name is assumed to be relative to + ``//tracing.googleapis.com``. Example: ``projects/my- + projectid/traces/06796866738c859f2f19b7cfb3214824`` + span_id: + Optional. Id of the span within the trace associated with the + log entry. e.g. "0000000000000042" For Stackdriver trace + spans, this is the same format that the Stackdriver trace API + uses. The ID is a 16-character hexadecimal encoding of an + 8-byte array. + source_location: + Optional. Source code location information associated with the + log entry, if any. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) + )) +_sym_db.RegisterMessage(LogEntry) +_sym_db.RegisterMessage(LogEntry.LabelsEntry) + +LogEntryOperation = _reflection.GeneratedProtocolMessageType('LogEntryOperation', (_message.Message,), dict( + DESCRIPTOR = _LOGENTRYOPERATION, + __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' + , + __doc__ = """Additional information about a potentially long-running operation with + which a log entry is associated. + + + Attributes: + id: + Optional. An arbitrary operation identifier. Log entries with + the same identifier are assumed to be part of the same + operation. + producer: + Optional. An arbitrary producer identifier. The combination of + ``id`` and ``producer`` must be globally unique. Examples for + ``producer``: ``"MyDivision.MyBigCompany.com"``, + ``"github.com/MyProject/MyApplication"``. + first: + Optional. Set this to True if this is the first log entry in + the operation. + last: + Optional. 
Set this to True if this is the last log entry in + the operation. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) + )) +_sym_db.RegisterMessage(LogEntryOperation) + +LogEntrySourceLocation = _reflection.GeneratedProtocolMessageType('LogEntrySourceLocation', (_message.Message,), dict( + DESCRIPTOR = _LOGENTRYSOURCELOCATION, + __module__ = 'google.cloud.logging_v2.proto.log_entry_pb2' + , + __doc__ = """Additional information about the source code location that produced the + log entry. + + + Attributes: + file: + Optional. Source file name. Depending on the runtime + environment, this might be a simple name or a fully-qualified + name. + line: + Optional. Line within the source file. 1-based; 0 indicates no + line number available. + function: + Optional. Human-readable name of the function or method being + invoked, with optional context such as the class or package + name. This information may be used in contexts such as the + logs viewer, where a file and line number are less meaningful. + The format can vary by language. For example: + ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` + (Go), ``function`` (Python). + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntrySourceLocation) + )) +_sym_db.RegisterMessage(LogEntrySourceLocation) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +_LOGENTRY_LABELSENTRY.has_options = True +_LOGENTRY_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. 
+ import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py new file mode 100644 index 000000000000..a89435267cb2 --- /dev/null +++ b/logging/google/cloud/logging_v2/proto/log_entry_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + diff --git a/logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/logging/google/cloud/logging_v2/proto/logging_config_pb2.py new file mode 100644 index 000000000000..5bcaeba1e9d1 --- /dev/null +++ b/logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -0,0 +1,1575 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/logging_v2/proto/logging_config.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/logging_v2/proto/logging_config.proto', + package='google.logging.v2', + syntax='proto3', + serialized_pb=_b('\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd7\x02\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12G\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormat\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12.\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 
\x01(\x05\"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\"S\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\x80\n\n\x0f\x43onfigServiceV2\x12v\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v2/{parent=*/*}/sinks\x12m\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v2/{sink_name=*/*/sinks/*}\x12t\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink\"$\x82\xd3\xe4\x93\x02\x1e\"\x16/v2/{parent=*/*}/sinks:\x04sink\x12y\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink\")\x82\xd3\xe4\x93\x02#\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sink\x12o\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty\"#\x82\xd3\xe4\x93\x02\x1d*\x1b/v2/{sink_name=*/*/sinks/*}\x12\x8a\x01\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v2/{parent=*/*}/exclusions\x12|\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v2/{name=*/*/exclusions/*}\x12\x8d\x01\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\".\x82\xd3\xe4\x93\x02(\"\x1b/v2/{parent=*/*}/exclusions:\texclusion\x12\x8d\x01\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion\".\x82\xd3\xe4\x93\x02(2\x1b/v2/{name=*/*/exclusions/*}:\texclusion\x12y\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty\"#\x82\xd3\xe4\x93\x02\x1d*\x1b/v2/{name=*/*/exclusions/*}B\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + , + 
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_LOGSINK_VERSIONFORMAT = _descriptor.EnumDescriptor( + name='VersionFormat', + full_name='google.logging.v2.LogSink.VersionFormat', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VERSION_FORMAT_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='V2', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='V1', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=480, + serialized_end=543, +) +_sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) + + +_LOGSINK = _descriptor.Descriptor( + name='LogSink', + full_name='google.logging.v2.LogSink', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.LogSink.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='destination', full_name='google.logging.v2.LogSink.destination', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.logging.v2.LogSink.filter', index=2, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='output_version_format', full_name='google.logging.v2.LogSink.output_version_format', index=3, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='writer_identity', full_name='google.logging.v2.LogSink.writer_identity', index=4, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='include_children', full_name='google.logging.v2.LogSink.include_children', index=5, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_time', full_name='google.logging.v2.LogSink.start_time', index=6, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_time', full_name='google.logging.v2.LogSink.end_time', index=7, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _LOGSINK_VERSIONFORMAT, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=200, + serialized_end=543, +) + + +_LISTSINKSREQUEST 
= _descriptor.Descriptor( + name='ListSinksRequest', + full_name='google.logging.v2.ListSinksRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.ListSinksRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListSinksRequest.page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListSinksRequest.page_size', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=545, + serialized_end=618, +) + + +_LISTSINKSRESPONSE = _descriptor.Descriptor( + name='ListSinksResponse', + full_name='google.logging.v2.ListSinksResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sinks', full_name='google.logging.v2.ListSinksResponse.sinks', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', 
full_name='google.logging.v2.ListSinksResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=620, + serialized_end=707, +) + + +_GETSINKREQUEST = _descriptor.Descriptor( + name='GetSinkRequest', + full_name='google.logging.v2.GetSinkRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sink_name', full_name='google.logging.v2.GetSinkRequest.sink_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=709, + serialized_end=744, +) + + +_CREATESINKREQUEST = _descriptor.Descriptor( + name='CreateSinkRequest', + full_name='google.logging.v2.CreateSinkRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.CreateSinkRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sink', full_name='google.logging.v2.CreateSinkRequest.sink', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='unique_writer_identity', full_name='google.logging.v2.CreateSinkRequest.unique_writer_identity', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=746, + serialized_end=855, +) + + +_UPDATESINKREQUEST = _descriptor.Descriptor( + name='UpdateSinkRequest', + full_name='google.logging.v2.UpdateSinkRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sink_name', full_name='google.logging.v2.UpdateSinkRequest.sink_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sink', full_name='google.logging.v2.UpdateSinkRequest.sink', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='unique_writer_identity', full_name='google.logging.v2.UpdateSinkRequest.unique_writer_identity', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', 
full_name='google.logging.v2.UpdateSinkRequest.update_mask', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=858, + serialized_end=1019, +) + + +_DELETESINKREQUEST = _descriptor.Descriptor( + name='DeleteSinkRequest', + full_name='google.logging.v2.DeleteSinkRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sink_name', full_name='google.logging.v2.DeleteSinkRequest.sink_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1021, + serialized_end=1059, +) + + +_LOGEXCLUSION = _descriptor.Descriptor( + name='LogExclusion', + full_name='google.logging.v2.LogExclusion', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.LogExclusion.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='description', full_name='google.logging.v2.LogExclusion.description', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.logging.v2.LogExclusion.filter', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='disabled', full_name='google.logging.v2.LogExclusion.disabled', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1061, + serialized_end=1144, +) + + +_LISTEXCLUSIONSREQUEST = _descriptor.Descriptor( + name='ListExclusionsRequest', + full_name='google.logging.v2.ListExclusionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.ListExclusionsRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListExclusionsRequest.page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', 
full_name='google.logging.v2.ListExclusionsRequest.page_size', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1146, + serialized_end=1224, +) + + +_LISTEXCLUSIONSRESPONSE = _descriptor.Descriptor( + name='ListExclusionsResponse', + full_name='google.logging.v2.ListExclusionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='exclusions', full_name='google.logging.v2.ListExclusionsResponse.exclusions', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListExclusionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1226, + serialized_end=1328, +) + + +_GETEXCLUSIONREQUEST = _descriptor.Descriptor( + name='GetExclusionRequest', + full_name='google.logging.v2.GetExclusionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.GetExclusionRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1330, + serialized_end=1365, +) + + +_CREATEEXCLUSIONREQUEST = _descriptor.Descriptor( + name='CreateExclusionRequest', + full_name='google.logging.v2.CreateExclusionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.CreateExclusionRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='exclusion', full_name='google.logging.v2.CreateExclusionRequest.exclusion', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1367, + serialized_end=1459, +) + + +_UPDATEEXCLUSIONREQUEST = _descriptor.Descriptor( + name='UpdateExclusionRequest', + full_name='google.logging.v2.UpdateExclusionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.UpdateExclusionRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='exclusion', full_name='google.logging.v2.UpdateExclusionRequest.exclusion', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.logging.v2.UpdateExclusionRequest.update_mask', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1462, + serialized_end=1601, +) + + +_DELETEEXCLUSIONREQUEST = _descriptor.Descriptor( + name='DeleteExclusionRequest', + full_name='google.logging.v2.DeleteExclusionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.DeleteExclusionRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1603, + serialized_end=1641, +) + +_LOGSINK.fields_by_name['output_version_format'].enum_type = _LOGSINK_VERSIONFORMAT +_LOGSINK.fields_by_name['start_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name['end_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP 
+_LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK +_LISTSINKSRESPONSE.fields_by_name['sinks'].message_type = _LOGSINK +_CREATESINKREQUEST.fields_by_name['sink'].message_type = _LOGSINK +_UPDATESINKREQUEST.fields_by_name['sink'].message_type = _LOGSINK +_UPDATESINKREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTEXCLUSIONSRESPONSE.fields_by_name['exclusions'].message_type = _LOGEXCLUSION +_CREATEEXCLUSIONREQUEST.fields_by_name['exclusion'].message_type = _LOGEXCLUSION +_UPDATEEXCLUSIONREQUEST.fields_by_name['exclusion'].message_type = _LOGEXCLUSION +_UPDATEEXCLUSIONREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +DESCRIPTOR.message_types_by_name['LogSink'] = _LOGSINK +DESCRIPTOR.message_types_by_name['ListSinksRequest'] = _LISTSINKSREQUEST +DESCRIPTOR.message_types_by_name['ListSinksResponse'] = _LISTSINKSRESPONSE +DESCRIPTOR.message_types_by_name['GetSinkRequest'] = _GETSINKREQUEST +DESCRIPTOR.message_types_by_name['CreateSinkRequest'] = _CREATESINKREQUEST +DESCRIPTOR.message_types_by_name['UpdateSinkRequest'] = _UPDATESINKREQUEST +DESCRIPTOR.message_types_by_name['DeleteSinkRequest'] = _DELETESINKREQUEST +DESCRIPTOR.message_types_by_name['LogExclusion'] = _LOGEXCLUSION +DESCRIPTOR.message_types_by_name['ListExclusionsRequest'] = _LISTEXCLUSIONSREQUEST +DESCRIPTOR.message_types_by_name['ListExclusionsResponse'] = _LISTEXCLUSIONSRESPONSE +DESCRIPTOR.message_types_by_name['GetExclusionRequest'] = _GETEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name['CreateExclusionRequest'] = _CREATEEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name['UpdateExclusionRequest'] = _UPDATEEXCLUSIONREQUEST +DESCRIPTOR.message_types_by_name['DeleteExclusionRequest'] = _DELETEEXCLUSIONREQUEST + +LogSink = _reflection.GeneratedProtocolMessageType('LogSink', (_message.Message,), dict( + DESCRIPTOR = _LOGSINK, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' 
+ , + __doc__ = """Describes a sink used to export log entries to one of the following + destinations in any project: a Cloud Storage bucket, a BigQuery dataset, + or a Cloud Pub/Sub topic. A logs filter controls which log entries are + exported. The sink must be created within a project, organization, + billing account, or folder. + + + Attributes: + name: + Required. The client-assigned sink identifier, unique within + the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + identifiers are limited to 100 characters and can include only + the following characters: upper and lower-case alphanumeric + characters, underscores, hyphens, and periods. + destination: + Required. The export destination: :: + "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis + .com/projects/[PROJECT_ID]/datasets/[DATASET]" "pubsub.goo + gleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" The + sink's ``writer_identity``, set when the sink is created, must + have permission to write to the destination or else the log + entries are not exported. For more information, see `Exporting + Logs With Sinks `__. + filter: + Optional. An `advanced logs filter + `__. The only exported + log entries are those that are in the resource owning the sink + and that match the filter. For example: :: + logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND + severity>=ERROR + output_version_format: + Deprecated. The log entry format to use for this sink's + exported log entries. The v2 format is used by default and + cannot be changed. + writer_identity: + Output only. An IAM identity—a service account or group—under + which Stackdriver Logging writes the exported log entries to + the sink's destination. This field is set by `sinks.create + `__ + and `sinks.update `__, based on the setting of + ``unique_writer_identity`` in those methods. Until you grant + this identity write-access to the destination, log entry + exports from this sink will fail. 
For more information, see + `Granting access for a resource `__. Consult the destination service's documentation to + determine the appropriate IAM roles to assign to the identity. + include_children: + Optional. This field applies only to sinks owned by + organizations and folders. If the field is false, the default, + only the logs owned by the sink's parent resource are + available for export. If the field is true, then logs from all + the projects, folders, and billing accounts contained in the + sink's parent resource are also available for export. Whether + a particular log entry from the children is exported depends + on the sink's filter expression. For example, if this field is + true, then the filter ``resource.type=gce_instance`` would + export all Compute Engine VM instance log entries from all + projects in the sink's parent. To only export entries from + certain child projects, filter on the project part of the log + name: :: logName:("projects/test-project1/" OR + "projects/test-project2/") AND resource.type=gce_instance + start_time: + Deprecated. This field is ignored when creating or updating + sinks. + end_time: + Deprecated. This field is ignored when creating or updating + sinks. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) + )) +_sym_db.RegisterMessage(LogSink) + +ListSinksRequest = _reflection.GeneratedProtocolMessageType('ListSinksRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTSINKSREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``ListSinks``. + + + Attributes: + parent: + Required. The parent resource whose sinks are to be listed: + :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. 
``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) + )) +_sym_db.RegisterMessage(ListSinksRequest) + +ListSinksResponse = _reflection.GeneratedProtocolMessageType('ListSinksResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTSINKSRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """Result returned from ``ListSinks``. + + + Attributes: + sinks: + A list of sinks. + next_page_token: + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) + )) +_sym_db.RegisterMessage(ListSinksResponse) + +GetSinkRequest = _reflection.GeneratedProtocolMessageType('GetSinkRequest', (_message.Message,), dict( + DESCRIPTOR = _GETSINKREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``GetSink``. + + + Attributes: + sink_name: + Required. The resource name of the sink: :: + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: + ``"projects/my-project-id/sinks/my-sink-id"``. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) + )) +_sym_db.RegisterMessage(GetSinkRequest) + +CreateSinkRequest = _reflection.GeneratedProtocolMessageType('CreateSinkRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATESINKREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``CreateSink``. + + + Attributes: + parent: + Required. The resource in which to create the sink: :: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- + project"``, ``"organizations/123456789"``. + sink: + Required. The new sink, whose ``name`` parameter is a sink + identifier that is not already in use. + unique_writer_identity: + Optional. Determines the kind of IAM identity returned as + ``writer_identity`` in the new sink. If this value is omitted + or set to false, and if the sink's parent is a project, then + the value returned as ``writer_identity`` is the same group or + service account used by Stackdriver Logging before the + addition of writer identities to this API. The sink's + destination must be in the same project as the sink itself. + If this field is set to true, or if the sink is owned by a + non-project resource such as an organization, then the value + of ``writer_identity`` will be a unique service account used + only for exports from the new sink. For more information, see + ``writer_identity`` in [LogSink][google.logging.v2.LogSink]. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) + )) +_sym_db.RegisterMessage(CreateSinkRequest) + +UpdateSinkRequest = _reflection.GeneratedProtocolMessageType('UpdateSinkRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATESINKREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``UpdateSink``. 
+ + + Attributes: + sink_name: + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: :: + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: + ``"projects/my-project-id/sinks/my-sink-id"``. + sink: + Required. The updated sink, whose name is the same identifier + that appears as part of ``sink_name``. + unique_writer_identity: + Optional. See `sinks.create + `__ + for a description of this field. When updating a sink, the + effect of this field on the value of ``writer_identity`` in + the updated sink depends on both the old and new values of + this field: - If the old and new values of this field are + both false or both true, then there is no change to the + sink's ``writer_identity``. - If the old value is false and + the new value is true, then ``writer_identity`` is changed + to a unique service account. - It is an error if the old + value is true and the new value is set to false or + defaulted to false. + update_mask: + Optional. Field mask that specifies the fields in ``sink`` + that need an update. A sink field will be overwritten if, and + only if, it is in the update mask. ``name`` and output only + fields cannot be updated. An empty updateMask is temporarily + treated as using the following mask for backwards + compatibility purposes: destination,filter,includeChildren At + some point in the future, behavior will be removed and + specifying an empty updateMask will be an error. For a + detailed ``FieldMask`` definition, see + https://developers.google.com/protocol- + buffers/docs/reference/google.protobuf#fieldmask Example: + ``updateMask=filter``. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) + )) +_sym_db.RegisterMessage(UpdateSinkRequest) + +DeleteSinkRequest = _reflection.GeneratedProtocolMessageType('DeleteSinkRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETESINKREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``DeleteSink``. + + + Attributes: + sink_name: + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: :: + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: + ``"projects/my-project-id/sinks/my-sink-id"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) + )) +_sym_db.RegisterMessage(DeleteSinkRequest) + +LogExclusion = _reflection.GeneratedProtocolMessageType('LogExclusion', (_message.Message,), dict( + DESCRIPTOR = _LOGEXCLUSION, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """Specifies a set of log entries that are not to be stored in Stackdriver + Logging. If your project receives a large volume of logs, you might be + able to use exclusions to reduce your chargeable logs. Exclusions are + processed after log sinks, so you can export log entries before they are + excluded. Audit log entries and log entries from Amazon Web Services are + never excluded. + + + Attributes: + name: + Required. A client-assigned identifier, such as ``"load- + balancer-exclusion"``. Identifiers are limited to 100 + characters and can include only letters, digits, underscores, + hyphens, and periods. + description: + Optional. A description of this exclusion. + filter: + Required. An `advanced logs filter + `__ that matches the log + entries to be excluded. 
By using the `sample function + `__, you can + exclude less than 100% of the matching log entries. For + example, the following filter matches 99% of low-severity log + entries from load balancers: :: + "resource.type=http_load_balancer severity`__ to change the value + of this field. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogExclusion) + )) +_sym_db.RegisterMessage(LogExclusion) + +ListExclusionsRequest = _reflection.GeneratedProtocolMessageType('ListExclusionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTEXCLUSIONSREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``ListExclusions``. + + + Attributes: + parent: + Required. The parent resource whose exclusions are to be + listed. :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsRequest) + )) +_sym_db.RegisterMessage(ListExclusionsRequest) + +ListExclusionsResponse = _reflection.GeneratedProtocolMessageType('ListExclusionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTEXCLUSIONSRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """Result returned from ``ListExclusions``. + + + Attributes: + exclusions: + A list of exclusions. 
+ next_page_token: + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListExclusionsResponse) + )) +_sym_db.RegisterMessage(ListExclusionsResponse) + +GetExclusionRequest = _reflection.GeneratedProtocolMessageType('GetExclusionRequest', (_message.Message,), dict( + DESCRIPTOR = _GETEXCLUSIONREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``GetExclusion``. + + + Attributes: + name: + Required. The resource name of an existing exclusion: :: + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID + ]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + Example: ``"projects/my-project-id/exclusions/my-exclusion- + id"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.GetExclusionRequest) + )) +_sym_db.RegisterMessage(GetExclusionRequest) + +CreateExclusionRequest = _reflection.GeneratedProtocolMessageType('CreateExclusionRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATEEXCLUSIONREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``CreateExclusion``. + + + Attributes: + parent: + Required. The parent resource in which to create the + exclusion: :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- + project"``, ``"organizations/123456789"``. + exclusion: + Required. The new exclusion, whose ``name`` parameter is an + exclusion name that is not already used in the parent + resource. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateExclusionRequest) + )) +_sym_db.RegisterMessage(CreateExclusionRequest) + +UpdateExclusionRequest = _reflection.GeneratedProtocolMessageType('UpdateExclusionRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATEEXCLUSIONREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``UpdateExclusion``. + + + Attributes: + name: + Required. The resource name of the exclusion to update: :: + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID + ]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + Example: ``"projects/my-project-id/exclusions/my-exclusion- + id"``. + exclusion: + Required. New values for the existing exclusion. Only the + fields specified in ``update_mask`` are relevant. + update_mask: + Required. A nonempty list of fields to change in the existing + exclusion. New values for the fields are taken from the + corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included in + this request. Fields not mentioned in ``update_mask`` are not + changed and are ignored in the request. For example, to + change the filter and description of an exclusion, specify an + ``update_mask`` of ``"filter,description"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateExclusionRequest) + )) +_sym_db.RegisterMessage(UpdateExclusionRequest) + +DeleteExclusionRequest = _reflection.GeneratedProtocolMessageType('DeleteExclusionRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETEEXCLUSIONREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_config_pb2' + , + __doc__ = """The parameters to ``DeleteExclusion``. + + + Attributes: + name: + Required. 
The resource name of an existing exclusion to + delete: :: + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID + ]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + Example: ``"projects/my-project-id/exclusions/my-exclusion- + id"``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteExclusionRequest) + )) +_sym_db.RegisterMessage(DeleteExclusionRequest) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class ConfigServiceV2Stub(object): + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ListSinks = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListSinks', + request_serializer=ListSinksRequest.SerializeToString, + response_deserializer=ListSinksResponse.FromString, + ) + self.GetSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSink', + request_serializer=GetSinkRequest.SerializeToString, + response_deserializer=LogSink.FromString, + ) + self.CreateSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', + request_serializer=CreateSinkRequest.SerializeToString, + response_deserializer=LogSink.FromString, + ) + self.UpdateSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', + request_serializer=UpdateSinkRequest.SerializeToString, + response_deserializer=LogSink.FromString, + ) + self.DeleteSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', + request_serializer=DeleteSinkRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListExclusions = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', + request_serializer=ListExclusionsRequest.SerializeToString, + response_deserializer=ListExclusionsResponse.FromString, + ) + self.GetExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', + request_serializer=GetExclusionRequest.SerializeToString, + response_deserializer=LogExclusion.FromString, + ) + self.CreateExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateExclusion', + request_serializer=CreateExclusionRequest.SerializeToString, + response_deserializer=LogExclusion.FromString, + ) + self.UpdateExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + request_serializer=UpdateExclusionRequest.SerializeToString, + response_deserializer=LogExclusion.FromString, + ) + self.DeleteExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteExclusion', 
+ request_serializer=DeleteExclusionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + + class ConfigServiceV2Servicer(object): + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + + def ListSinks(self, request, context): + """Lists sinks. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSink(self, request, context): + """Gets a sink. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSink(self, request, context): + """Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + `writer_identity` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSink(self, request, context): + """Updates a sink. This method replaces the following fields in the existing + sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the + `unique_writer_identity` field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSink(self, request, context): + """Deletes a sink. If the sink has a unique `writer_identity`, then that + service account is also deleted. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListExclusions(self, request, context): + """Lists all the exclusions in a parent resource. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetExclusion(self, request, context): + """Gets the description of an exclusion. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateExclusion(self, request, context): + """Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateExclusion(self, request, context): + """Changes one or more properties of an existing exclusion. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteExclusion(self, request, context): + """Deletes an exclusion. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_ConfigServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListSinks': grpc.unary_unary_rpc_method_handler( + servicer.ListSinks, + request_deserializer=ListSinksRequest.FromString, + response_serializer=ListSinksResponse.SerializeToString, + ), + 'GetSink': grpc.unary_unary_rpc_method_handler( + servicer.GetSink, + request_deserializer=GetSinkRequest.FromString, + response_serializer=LogSink.SerializeToString, + ), + 'CreateSink': grpc.unary_unary_rpc_method_handler( + servicer.CreateSink, + request_deserializer=CreateSinkRequest.FromString, + response_serializer=LogSink.SerializeToString, + ), + 'UpdateSink': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSink, + request_deserializer=UpdateSinkRequest.FromString, + response_serializer=LogSink.SerializeToString, + ), + 'DeleteSink': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSink, + request_deserializer=DeleteSinkRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListExclusions': grpc.unary_unary_rpc_method_handler( + servicer.ListExclusions, + request_deserializer=ListExclusionsRequest.FromString, + response_serializer=ListExclusionsResponse.SerializeToString, + ), + 'GetExclusion': grpc.unary_unary_rpc_method_handler( + servicer.GetExclusion, + request_deserializer=GetExclusionRequest.FromString, + response_serializer=LogExclusion.SerializeToString, + ), + 'CreateExclusion': grpc.unary_unary_rpc_method_handler( + servicer.CreateExclusion, + request_deserializer=CreateExclusionRequest.FromString, + response_serializer=LogExclusion.SerializeToString, + ), + 'UpdateExclusion': grpc.unary_unary_rpc_method_handler( + servicer.UpdateExclusion, + request_deserializer=UpdateExclusionRequest.FromString, + 
response_serializer=LogExclusion.SerializeToString, + ), + 'DeleteExclusion': grpc.unary_unary_rpc_method_handler( + servicer.DeleteExclusion, + request_deserializer=DeleteExclusionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.ConfigServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaConfigServiceV2Servicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + def ListSinks(self, request, context): + """Lists sinks. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetSink(self, request, context): + """Gets a sink. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateSink(self, request, context): + """Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + `writer_identity` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSink(self, request, context): + """Updates a sink. This method replaces the following fields in the existing + sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the + `unique_writer_identity` field. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSink(self, request, context): + """Deletes a sink. 
If the sink has a unique `writer_identity`, then that + service account is also deleted. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListExclusions(self, request, context): + """Lists all the exclusions in a parent resource. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetExclusion(self, request, context): + """Gets the description of an exclusion. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateExclusion(self, request, context): + """Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateExclusion(self, request, context): + """Changes one or more properties of an existing exclusion. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteExclusion(self, request, context): + """Deletes an exclusion. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaConfigServiceV2Stub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + def ListSinks(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists sinks. + """ + raise NotImplementedError() + ListSinks.future = None + def GetSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets a sink. + """ + raise NotImplementedError() + GetSink.future = None + def CreateSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a sink that exports specified log entries to a destination. 
The + export of newly-ingested log entries begins immediately, unless the sink's + `writer_identity` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + """ + raise NotImplementedError() + CreateSink.future = None + def UpdateSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates a sink. This method replaces the following fields in the existing + sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the + `unique_writer_identity` field. + """ + raise NotImplementedError() + UpdateSink.future = None + def DeleteSink(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes a sink. If the sink has a unique `writer_identity`, then that + service account is also deleted. + """ + raise NotImplementedError() + DeleteSink.future = None + def ListExclusions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists all the exclusions in a parent resource. + """ + raise NotImplementedError() + ListExclusions.future = None + def GetExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the description of an exclusion. + """ + raise NotImplementedError() + GetExclusion.future = None + def CreateExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ + raise NotImplementedError() + CreateExclusion.future = None + def UpdateExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Changes one or more properties of an existing exclusion. 
+ """ + raise NotImplementedError() + UpdateExclusion.future = None + def DeleteExclusion(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes an exclusion. + """ + raise NotImplementedError() + DeleteExclusion.future = None + + + def beta_create_ConfigServiceV2_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): CreateExclusionRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): CreateSinkRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): DeleteExclusionRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): DeleteSinkRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): GetExclusionRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'GetSink'): GetSinkRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): UpdateExclusionRequest.FromString, + ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): UpdateSinkRequest.FromString, + } + response_serializers = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): LogExclusion.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): LogSink.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): 
google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): LogExclusion.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'GetSink'): LogSink.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsResponse.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksResponse.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): LogExclusion.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): LogSink.SerializeToString, + } + method_implementations = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): face_utilities.unary_unary_inline(servicer.CreateExclusion), + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): face_utilities.unary_unary_inline(servicer.CreateSink), + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): face_utilities.unary_unary_inline(servicer.DeleteExclusion), + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): face_utilities.unary_unary_inline(servicer.DeleteSink), + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): face_utilities.unary_unary_inline(servicer.GetExclusion), + ('google.logging.v2.ConfigServiceV2', 'GetSink'): face_utilities.unary_unary_inline(servicer.GetSink), + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): face_utilities.unary_unary_inline(servicer.ListExclusions), + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): face_utilities.unary_unary_inline(servicer.ListSinks), + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): face_utilities.unary_unary_inline(servicer.UpdateExclusion), + ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): face_utilities.unary_unary_inline(servicer.UpdateSink), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, 
maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_ConfigServiceV2_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): CreateExclusionRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): CreateSinkRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): DeleteExclusionRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): DeleteSinkRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): GetExclusionRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'GetSink'): GetSinkRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): UpdateExclusionRequest.SerializeToString, + ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): UpdateSinkRequest.SerializeToString, + } + response_deserializers = { + ('google.logging.v2.ConfigServiceV2', 'CreateExclusion'): LogExclusion.FromString, + ('google.logging.v2.ConfigServiceV2', 'CreateSink'): LogSink.FromString, + ('google.logging.v2.ConfigServiceV2', 'DeleteExclusion'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.logging.v2.ConfigServiceV2', 'DeleteSink'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.logging.v2.ConfigServiceV2', 'GetExclusion'): LogExclusion.FromString, + 
('google.logging.v2.ConfigServiceV2', 'GetSink'): LogSink.FromString, + ('google.logging.v2.ConfigServiceV2', 'ListExclusions'): ListExclusionsResponse.FromString, + ('google.logging.v2.ConfigServiceV2', 'ListSinks'): ListSinksResponse.FromString, + ('google.logging.v2.ConfigServiceV2', 'UpdateExclusion'): LogExclusion.FromString, + ('google.logging.v2.ConfigServiceV2', 'UpdateSink'): LogSink.FromString, + } + cardinalities = { + 'CreateExclusion': cardinality.Cardinality.UNARY_UNARY, + 'CreateSink': cardinality.Cardinality.UNARY_UNARY, + 'DeleteExclusion': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSink': cardinality.Cardinality.UNARY_UNARY, + 'GetExclusion': cardinality.Cardinality.UNARY_UNARY, + 'GetSink': cardinality.Cardinality.UNARY_UNARY, + 'ListExclusions': cardinality.Cardinality.UNARY_UNARY, + 'ListSinks': cardinality.Cardinality.UNARY_UNARY, + 'UpdateExclusion': cardinality.Cardinality.UNARY_UNARY, + 'UpdateSink': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.logging.v2.ConfigServiceV2', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py new file mode 100644 index 000000000000..4f218e7d43b6 --- /dev/null +++ b/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py @@ -0,0 +1,211 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+import grpc + +import google.cloud.logging_v2.proto.logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class ConfigServiceV2Stub(object): + """Service for configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListSinks = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListSinks', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.FromString, + ) + self.GetSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSink', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, + ) + self.CreateSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, + ) + self.UpdateSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.FromString, + ) + self.DeleteSink = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', + 
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListExclusions = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.FromString, + ) + self.GetExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, + ) + self.CreateExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateExclusion', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, + ) + self.UpdateExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.FromString, + ) + self.DeleteExclusion = channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class ConfigServiceV2Servicer(object): + """Service for 
configuring sinks used to export log entries outside of + Stackdriver Logging. + """ + + def ListSinks(self, request, context): + """Lists sinks. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSink(self, request, context): + """Gets a sink. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSink(self, request, context): + """Creates a sink that exports specified log entries to a destination. The + export of newly-ingested log entries begins immediately, unless the sink's + `writer_identity` is not permitted to write to the destination. A sink can + export log entries only from the resource owning the sink. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSink(self, request, context): + """Updates a sink. This method replaces the following fields in the existing + sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the + `unique_writer_identity` field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSink(self, request, context): + """Deletes a sink. If the sink has a unique `writer_identity`, then that + service account is also deleted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListExclusions(self, request, context): + """Lists all the exclusions in a parent resource. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetExclusion(self, request, context): + """Gets the description of an exclusion. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateExclusion(self, request, context): + """Creates a new exclusion in a specified parent resource. + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateExclusion(self, request, context): + """Changes one or more properties of an existing exclusion. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteExclusion(self, request, context): + """Deletes an exclusion. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ConfigServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListSinks': grpc.unary_unary_rpc_method_handler( + servicer.ListSinks, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksResponse.SerializeToString, + ), + 'GetSink': grpc.unary_unary_rpc_method_handler( + servicer.GetSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetSinkRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, + ), + 'CreateSink': grpc.unary_unary_rpc_method_handler( + servicer.CreateSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateSinkRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, + ), + 'UpdateSink': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateSinkRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogSink.SerializeToString, + ), + 'DeleteSink': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSink, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteSinkRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListExclusions': grpc.unary_unary_rpc_method_handler( + servicer.ListExclusions, + 
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListExclusionsResponse.SerializeToString, + ), + 'GetExclusion': grpc.unary_unary_rpc_method_handler( + servicer.GetExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.GetExclusionRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, + ), + 'CreateExclusion': grpc.unary_unary_rpc_method_handler( + servicer.CreateExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.CreateExclusionRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, + ), + 'UpdateExclusion': grpc.unary_unary_rpc_method_handler( + servicer.UpdateExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.UpdateExclusionRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.LogExclusion.SerializeToString, + ), + 'DeleteExclusion': grpc.unary_unary_rpc_method_handler( + servicer.DeleteExclusion, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DeleteExclusionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.ConfigServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py new file mode 100644 index 000000000000..76a68b0c83fe --- /dev/null +++ 
b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -0,0 +1,895 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/logging_v2/proto/logging_metrics.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 +from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/logging_v2/proto/logging_metrics.proto', + package='google.logging.v2', + syntax='proto3', + serialized_pb=_b('\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\"\xad\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12\x38\n\x07version\x18\x04 
\x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersion\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01\"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric\"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"/\x82\xd3\xe4\x93\x02)\"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric\"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty\".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.
org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_distribution__pb2.DESCRIPTOR,google_dot_api_dot_metric__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_LOGMETRIC_APIVERSION = _descriptor.EnumDescriptor( + name='ApiVersion', + full_name='google.logging.v2.LogMetric.ApiVersion', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='V2', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='V1', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=625, + serialized_end=653, +) +_sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) + + +_LOGMETRIC_LABELEXTRACTORSENTRY = _descriptor.Descriptor( + name='LabelExtractorsEntry', + full_name='google.logging.v2.LogMetric.LabelExtractorsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.logging.v2.LogMetric.LabelExtractorsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.logging.v2.LogMetric.LabelExtractorsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=569, + serialized_end=623, +) + +_LOGMETRIC = _descriptor.Descriptor( + name='LogMetric', + full_name='google.logging.v2.LogMetric', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.logging.v2.LogMetric.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='description', full_name='google.logging.v2.LogMetric.description', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.logging.v2.LogMetric.filter', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='metric_descriptor', full_name='google.logging.v2.LogMetric.metric_descriptor', index=3, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value_extractor', full_name='google.logging.v2.LogMetric.value_extractor', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='label_extractors', full_name='google.logging.v2.LogMetric.label_extractors', index=5, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bucket_options', full_name='google.logging.v2.LogMetric.bucket_options', index=6, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='version', full_name='google.logging.v2.LogMetric.version', index=7, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY, ], + enum_types=[ + _LOGMETRIC_APIVERSION, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=224, + serialized_end=653, +) + + +_LISTLOGMETRICSREQUEST = _descriptor.Descriptor( + name='ListLogMetricsRequest', + full_name='google.logging.v2.ListLogMetricsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.ListLogMetricsRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListLogMetricsRequest.page_token', 
index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListLogMetricsRequest.page_size', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=655, + serialized_end=733, +) + + +_LISTLOGMETRICSRESPONSE = _descriptor.Descriptor( + name='ListLogMetricsResponse', + full_name='google.logging.v2.ListLogMetricsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='metrics', full_name='google.logging.v2.ListLogMetricsResponse.metrics', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListLogMetricsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=735, + serialized_end=831, +) + + +_GETLOGMETRICREQUEST = _descriptor.Descriptor( + name='GetLogMetricRequest', + 
full_name='google.logging.v2.GetLogMetricRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='metric_name', full_name='google.logging.v2.GetLogMetricRequest.metric_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=833, + serialized_end=875, +) + + +_CREATELOGMETRICREQUEST = _descriptor.Descriptor( + name='CreateLogMetricRequest', + full_name='google.logging.v2.CreateLogMetricRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.CreateLogMetricRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='metric', full_name='google.logging.v2.CreateLogMetricRequest.metric', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=877, + serialized_end=963, +) + + +_UPDATELOGMETRICREQUEST = _descriptor.Descriptor( + name='UpdateLogMetricRequest', + full_name='google.logging.v2.UpdateLogMetricRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='metric_name', full_name='google.logging.v2.UpdateLogMetricRequest.metric_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='metric', full_name='google.logging.v2.UpdateLogMetricRequest.metric', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=965, + serialized_end=1056, +) + + +_DELETELOGMETRICREQUEST = _descriptor.Descriptor( + name='DeleteLogMetricRequest', + full_name='google.logging.v2.DeleteLogMetricRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='metric_name', full_name='google.logging.v2.DeleteLogMetricRequest.metric_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1058, + serialized_end=1103, +) + +_LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC +_LOGMETRIC.fields_by_name['metric_descriptor'].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR +_LOGMETRIC.fields_by_name['label_extractors'].message_type = _LOGMETRIC_LABELEXTRACTORSENTRY +_LOGMETRIC.fields_by_name['bucket_options'].message_type 
= google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS +_LOGMETRIC.fields_by_name['version'].enum_type = _LOGMETRIC_APIVERSION +_LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC +_LISTLOGMETRICSRESPONSE.fields_by_name['metrics'].message_type = _LOGMETRIC +_CREATELOGMETRICREQUEST.fields_by_name['metric'].message_type = _LOGMETRIC +_UPDATELOGMETRICREQUEST.fields_by_name['metric'].message_type = _LOGMETRIC +DESCRIPTOR.message_types_by_name['LogMetric'] = _LOGMETRIC +DESCRIPTOR.message_types_by_name['ListLogMetricsRequest'] = _LISTLOGMETRICSREQUEST +DESCRIPTOR.message_types_by_name['ListLogMetricsResponse'] = _LISTLOGMETRICSRESPONSE +DESCRIPTOR.message_types_by_name['GetLogMetricRequest'] = _GETLOGMETRICREQUEST +DESCRIPTOR.message_types_by_name['CreateLogMetricRequest'] = _CREATELOGMETRICREQUEST +DESCRIPTOR.message_types_by_name['UpdateLogMetricRequest'] = _UPDATELOGMETRICREQUEST +DESCRIPTOR.message_types_by_name['DeleteLogMetricRequest'] = _DELETELOGMETRICREQUEST + +LogMetric = _reflection.GeneratedProtocolMessageType('LogMetric', (_message.Message,), dict( + + LabelExtractorsEntry = _reflection.GeneratedProtocolMessageType('LabelExtractorsEntry', (_message.Message,), dict( + DESCRIPTOR = _LOGMETRIC_LABELEXTRACTORSENTRY, + __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' + # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric.LabelExtractorsEntry) + )) + , + DESCRIPTOR = _LOGMETRIC, + __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' + , + __doc__ = """Describes a logs-based metric. The value of the metric is the number of + log entries that match a logs filter in a given time interval. + + Logs-based metric can also be used to extract values from logs and + create a a distribution of the values. The distribution records the + statistics of the extracted values along with an optional histogram of + the values as specified by the bucket options. + + + Attributes: + name: + Required. 
The client-assigned metric identifier. Examples: + ``"error_count"``, ``"nginx/requests"``. Metric identifiers + are limited to 100 characters and can include only the + following characters: ``A-Z``, ``a-z``, ``0-9``, and the + special characters ``_-.,+!*',()%/``. The forward-slash + character (``/``) denotes a hierarchy of name pieces, and it + cannot be the first character of the name. The metric + identifier in this field must not be `URL-encoded + `__. However, + when the metric identifier appears as the ``[METRIC_ID]`` part + of a ``metric_name`` API parameter, then the metric identifier + must be URL-encoded. Example: ``"projects/my- + project/metrics/nginx%2Frequests"``. + description: + Optional. A description of this metric, which is used in + documentation. + filter: + Required. An `advanced logs filter + `__ which is used to + match log entries. Example: :: "resource.type=gae_app + AND severity>=ERROR" The maximum length of the filter is + 20000 characters. + metric_descriptor: + Optional. The metric descriptor associated with the logs-based + metric. If unspecified, it uses a default metric descriptor + with a DELTA metric kind, INT64 value type, with no labels and + a unit of "1". Such a metric counts the number of log entries + matching the ``filter`` expression. The ``name``, ``type``, + and ``description`` fields in the ``metric_descriptor`` are + output only, and is constructed using the ``name`` and + ``description`` field in the LogMetric. To create a logs- + based metric that records a distribution of log values, a + DELTA metric kind with a DISTRIBUTION value type must be used + along with a ``value_extractor`` expression in the LogMetric. + Each label in the metric descriptor must have a matching label + name as the key and an extractor expression as the value in + the ``label_extractors`` map. The ``metric_kind`` and + ``value_type`` fields in the ``metric_descriptor`` cannot be + updated once initially configured. 
New labels can be added in + the ``metric_descriptor``, but existing labels cannot be + modified except for their description. + value_extractor: + Optional. A ``value_extractor`` is required when using a + distribution logs-based metric to extract the values to record + from a log entry. Two functions are supported for value + extraction: ``EXTRACT(field)`` or ``REGEXP_EXTRACT(field, + regex)``. The argument are: 1. field: The name of the log + entry field from which the value is to be extracted. 2. regex: + A regular expression using the Google RE2 syntax + (https://github.com/google/re2/wiki/Syntax) with a single + capture group to extract data from the specified log entry + field. The value of the field is converted to a string before + applying the regex. It is an error to specify a regex that + does not include exactly one capture group. The result of the + extraction must be convertible to a double type, as the + distribution always records double values. If either the + extraction or the conversion to double fails, then those + values are not recorded in the distribution. Example: + ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` + label_extractors: + Optional. A map from a label key string to an extractor + expression which is used to extract data from a log entry + field and assign as the label value. Each label key specified + in the LabelDescriptor must have an associated extractor + expression in this map. The syntax of the extractor expression + is the same as for the ``value_extractor`` field. The + extracted value is converted to the type defined in the label + descriptor. If the either the extraction or the type + conversion fails, the label will have a default value. The + default value for a string label is an empty string, for an + integer label its 0, and for a boolean label its ``false``. + Note that there are upper bounds on the maximum number of + labels and the number of active time series that are allowed + in a project. 
+ bucket_options: + Optional. The ``bucket_options`` are required when the logs- + based metric is using a DISTRIBUTION value type and it + describes the bucket boundaries used to create a histogram of + the extracted values. + version: + Deprecated. The API version that created or updated this + metric. The v2 format is used by default and cannot be + changed. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric) + )) +_sym_db.RegisterMessage(LogMetric) +_sym_db.RegisterMessage(LogMetric.LabelExtractorsEntry) + +ListLogMetricsRequest = _reflection.GeneratedProtocolMessageType('ListLogMetricsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGMETRICSREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' + , + __doc__ = """The parameters to ListLogMetrics. + + + Attributes: + parent: + Required. The name of the project containing the metrics: :: + "projects/[PROJECT_ID]" + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest) + )) +_sym_db.RegisterMessage(ListLogMetricsRequest) + +ListLogMetricsResponse = _reflection.GeneratedProtocolMessageType('ListLogMetricsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGMETRICSRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' + , + __doc__ = """Result returned from ListLogMetrics. + + + Attributes: + metrics: + A list of logs-based metrics. 
+ next_page_token: + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse) + )) +_sym_db.RegisterMessage(ListLogMetricsResponse) + +GetLogMetricRequest = _reflection.GeneratedProtocolMessageType('GetLogMetricRequest', (_message.Message,), dict( + DESCRIPTOR = _GETLOGMETRICREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' + , + __doc__ = """The parameters to GetLogMetric. + + + Attributes: + metric_name: + The resource name of the desired metric: :: + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) + )) +_sym_db.RegisterMessage(GetLogMetricRequest) + +CreateLogMetricRequest = _reflection.GeneratedProtocolMessageType('CreateLogMetricRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATELOGMETRICREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' + , + __doc__ = """The parameters to CreateLogMetric. + + + Attributes: + parent: + The resource name of the project in which to create the + metric: :: "projects/[PROJECT_ID]" The new metric must + be provided in the request. + metric: + The new logs-based metric, which must not have an identifier + that already exists. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) + )) +_sym_db.RegisterMessage(CreateLogMetricRequest) + +UpdateLogMetricRequest = _reflection.GeneratedProtocolMessageType('UpdateLogMetricRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATELOGMETRICREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' + , + __doc__ = """The parameters to UpdateLogMetric. 
+ + + Attributes: + metric_name: + The resource name of the metric to update: :: + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" The updated + metric must be provided in the request and it's ``name`` field + must be the same as ``[METRIC_ID]`` If the metric does not + exist in ``[PROJECT_ID]``, then a new metric is created. + metric: + The updated metric. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) + )) +_sym_db.RegisterMessage(UpdateLogMetricRequest) + +DeleteLogMetricRequest = _reflection.GeneratedProtocolMessageType('DeleteLogMetricRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETELOGMETRICREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_metrics_pb2' + , + __doc__ = """The parameters to DeleteLogMetric. + + + Attributes: + metric_name: + The resource name of the metric to delete: :: + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) + )) +_sym_db.RegisterMessage(DeleteLogMetricRequest) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +_LOGMETRIC_LABELEXTRACTORSENTRY.has_options = True +_LOGMETRIC_LABELEXTRACTORSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class MetricsServiceV2Stub(object): + """Service for configuring logs-based metrics. 
+ """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListLogMetrics = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + request_serializer=ListLogMetricsRequest.SerializeToString, + response_deserializer=ListLogMetricsResponse.FromString, + ) + self.GetLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/GetLogMetric', + request_serializer=GetLogMetricRequest.SerializeToString, + response_deserializer=LogMetric.FromString, + ) + self.CreateLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + request_serializer=CreateLogMetricRequest.SerializeToString, + response_deserializer=LogMetric.FromString, + ) + self.UpdateLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + request_serializer=UpdateLogMetricRequest.SerializeToString, + response_deserializer=LogMetric.FromString, + ) + self.DeleteLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + request_serializer=DeleteLogMetricRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + + class MetricsServiceV2Servicer(object): + """Service for configuring logs-based metrics. + """ + + def ListLogMetrics(self, request, context): + """Lists logs-based metrics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetLogMetric(self, request, context): + """Gets a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateLogMetric(self, request, context): + """Creates a logs-based metric. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateLogMetric(self, request, context): + """Creates or updates a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteLogMetric(self, request, context): + """Deletes a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_MetricsServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListLogMetrics': grpc.unary_unary_rpc_method_handler( + servicer.ListLogMetrics, + request_deserializer=ListLogMetricsRequest.FromString, + response_serializer=ListLogMetricsResponse.SerializeToString, + ), + 'GetLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.GetLogMetric, + request_deserializer=GetLogMetricRequest.FromString, + response_serializer=LogMetric.SerializeToString, + ), + 'CreateLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.CreateLogMetric, + request_deserializer=CreateLogMetricRequest.FromString, + response_serializer=LogMetric.SerializeToString, + ), + 'UpdateLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.UpdateLogMetric, + request_deserializer=UpdateLogMetricRequest.FromString, + response_serializer=LogMetric.SerializeToString, + ), + 'DeleteLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.DeleteLogMetric, + request_deserializer=DeleteLogMetricRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.MetricsServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class 
BetaMetricsServiceV2Servicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for configuring logs-based metrics. + """ + def ListLogMetrics(self, request, context): + """Lists logs-based metrics. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetLogMetric(self, request, context): + """Gets a logs-based metric. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateLogMetric(self, request, context): + """Creates a logs-based metric. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateLogMetric(self, request, context): + """Creates or updates a logs-based metric. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteLogMetric(self, request, context): + """Deletes a logs-based metric. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaMetricsServiceV2Stub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for configuring logs-based metrics. + """ + def ListLogMetrics(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists logs-based metrics. + """ + raise NotImplementedError() + ListLogMetrics.future = None + def GetLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets a logs-based metric. + """ + raise NotImplementedError() + GetLogMetric.future = None + def CreateLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a logs-based metric. 
+ """ + raise NotImplementedError() + CreateLogMetric.future = None + def UpdateLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates or updates a logs-based metric. + """ + raise NotImplementedError() + UpdateLogMetric.future = None + def DeleteLogMetric(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes a logs-based metric. + """ + raise NotImplementedError() + DeleteLogMetric.future = None + + + def beta_create_MetricsServiceV2_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): CreateLogMetricRequest.FromString, + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): DeleteLogMetricRequest.FromString, + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): GetLogMetricRequest.FromString, + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsRequest.FromString, + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): UpdateLogMetricRequest.FromString, + } + response_serializers = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): LogMetric.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): LogMetric.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsResponse.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): LogMetric.SerializeToString, + } + method_implementations = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): 
face_utilities.unary_unary_inline(servicer.CreateLogMetric), + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): face_utilities.unary_unary_inline(servicer.DeleteLogMetric), + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): face_utilities.unary_unary_inline(servicer.GetLogMetric), + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): face_utilities.unary_unary_inline(servicer.ListLogMetrics), + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): face_utilities.unary_unary_inline(servicer.UpdateLogMetric), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_MetricsServiceV2_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): CreateLogMetricRequest.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): DeleteLogMetricRequest.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): GetLogMetricRequest.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsRequest.SerializeToString, + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): UpdateLogMetricRequest.SerializeToString, + } + response_deserializers = { + ('google.logging.v2.MetricsServiceV2', 'CreateLogMetric'): LogMetric.FromString, + ('google.logging.v2.MetricsServiceV2', 'DeleteLogMetric'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.logging.v2.MetricsServiceV2', 'GetLogMetric'): LogMetric.FromString, + ('google.logging.v2.MetricsServiceV2', 'ListLogMetrics'): ListLogMetricsResponse.FromString, + ('google.logging.v2.MetricsServiceV2', 'UpdateLogMetric'): LogMetric.FromString, + } + cardinalities = { + 'CreateLogMetric': cardinality.Cardinality.UNARY_UNARY, + 'DeleteLogMetric': cardinality.Cardinality.UNARY_UNARY, + 'GetLogMetric': cardinality.Cardinality.UNARY_UNARY, + 'ListLogMetrics': cardinality.Cardinality.UNARY_UNARY, + 'UpdateLogMetric': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.logging.v2.MetricsServiceV2', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py 
b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py new file mode 100644 index 000000000000..b464e0b4b3a4 --- /dev/null +++ b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py @@ -0,0 +1,115 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.logging_v2.proto.logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class MetricsServiceV2Stub(object): + """Service for configuring logs-based metrics. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListLogMetrics = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString, + ) + self.GetLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/GetLogMetric', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + ) + self.CreateLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + ) + self.UpdateLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + 
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, + ) + self.DeleteLogMetric = channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class MetricsServiceV2Servicer(object): + """Service for configuring logs-based metrics. + """ + + def ListLogMetrics(self, request, context): + """Lists logs-based metrics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetLogMetric(self, request, context): + """Gets a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateLogMetric(self, request, context): + """Creates a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateLogMetric(self, request, context): + """Creates or updates a logs-based metric. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteLogMetric(self, request, context): + """Deletes a logs-based metric. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_MetricsServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListLogMetrics': grpc.unary_unary_rpc_method_handler( + servicer.ListLogMetrics, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString, + ), + 'GetLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.GetLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + 'CreateLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.CreateLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + 'UpdateLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.UpdateLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, + ), + 'DeleteLogMetric': grpc.unary_unary_rpc_method_handler( + servicer.DeleteLogMetric, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 
'google.logging.v2.MetricsServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/logging/google/cloud/logging_v2/proto/logging_pb2.py b/logging/google/cloud/logging_v2/proto/logging_pb2.py new file mode 100644 index 000000000000..9665bcdee9f2 --- /dev/null +++ b/logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -0,0 +1,1146 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/logging_v2/proto/logging.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2 +from google.cloud.logging_v2.proto import log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/logging_v2/proto/logging.proto', + package='google.logging.v2', + syntax='proto3', + 
serialized_pb=_b('\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\"\x98\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x19\n\x17WriteLogEntriesResponse\"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01\"\x8d\x01\n\x15ListLogEntriesRequest\x12\x13\n\x0bproject_ids\x18\x01 \x03(\t\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t\"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t\"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t\"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd5\x05\n\x10LoggingServiceV2\x12w\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v2beta1/{log_name=projects/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse\"(\x82\xd3\xe4\x93\x02\"\x12 /v2/monitoredResourceDescriptors\x12r\n\x08ListLogs\x12\".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/v2/{parent=*/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR,google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_DELETELOGREQUEST = _descriptor.Descriptor( + name='DeleteLogRequest', + full_name='google.logging.v2.DeleteLogRequest', + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='log_name', full_name='google.logging.v2.DeleteLogRequest.log_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=299, + serialized_end=335, +) + + +_WRITELOGENTRIESREQUEST_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=573, + serialized_end=618, +) + +_WRITELOGENTRIESREQUEST = _descriptor.Descriptor( + name='WriteLogEntriesRequest', + full_name='google.logging.v2.WriteLogEntriesRequest', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='log_name', full_name='google.logging.v2.WriteLogEntriesRequest.log_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='resource', full_name='google.logging.v2.WriteLogEntriesRequest.resource', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.logging.v2.WriteLogEntriesRequest.labels', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entries', full_name='google.logging.v2.WriteLogEntriesRequest.entries', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='partial_success', full_name='google.logging.v2.WriteLogEntriesRequest.partial_success', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=338, + serialized_end=618, +) + + +_WRITELOGENTRIESRESPONSE = 
_descriptor.Descriptor( + name='WriteLogEntriesResponse', + full_name='google.logging.v2.WriteLogEntriesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=620, + serialized_end=645, +) + + +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY = _descriptor.Descriptor( + name='LogEntryErrorsEntry', + full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=775, + serialized_end=848, +) + +_WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( + name='WriteLogEntriesPartialErrors', + full_name='google.logging.v2.WriteLogEntriesPartialErrors', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='log_entry_errors', full_name='google.logging.v2.WriteLogEntriesPartialErrors.log_entry_errors', 
index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=648, + serialized_end=848, +) + + +_LISTLOGENTRIESREQUEST = _descriptor.Descriptor( + name='ListLogEntriesRequest', + full_name='google.logging.v2.ListLogEntriesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_ids', full_name='google.logging.v2.ListLogEntriesRequest.project_ids', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='resource_names', full_name='google.logging.v2.ListLogEntriesRequest.resource_names', index=1, + number=8, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.logging.v2.ListLogEntriesRequest.filter', index=2, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='order_by', full_name='google.logging.v2.ListLogEntriesRequest.order_by', index=3, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListLogEntriesRequest.page_size', index=4, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListLogEntriesRequest.page_token', index=5, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=851, + serialized_end=992, +) + + +_LISTLOGENTRIESRESPONSE = _descriptor.Descriptor( + name='ListLogEntriesResponse', + full_name='google.logging.v2.ListLogEntriesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entries', full_name='google.logging.v2.ListLogEntriesResponse.entries', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListLogEntriesResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + 
oneofs=[ + ], + serialized_start=994, + serialized_end=1089, +) + + +_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST = _descriptor.Descriptor( + name='ListMonitoredResourceDescriptorsRequest', + full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_size', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1091, + serialized_end=1171, +) + + +_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE = _descriptor.Descriptor( + name='ListMonitoredResourceDescriptorsResponse', + full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='resource_descriptors', full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse.resource_descriptors', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', 
full_name='google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1174, + serialized_end=1312, +) + + +_LISTLOGSREQUEST = _descriptor.Descriptor( + name='ListLogsRequest', + full_name='google.logging.v2.ListLogsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.logging.v2.ListLogsRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.logging.v2.ListLogsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.logging.v2.ListLogsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1314, + serialized_end=1386, +) + + +_LISTLOGSRESPONSE = _descriptor.Descriptor( + 
name='ListLogsResponse', + full_name='google.logging.v2.ListLogsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='log_names', full_name='google.logging.v2.ListLogsResponse.log_names', index=0, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.logging.v2.ListLogsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1388, + serialized_end=1450, +) + +_WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST +_WRITELOGENTRIESREQUEST.fields_by_name['resource'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE +_WRITELOGENTRIESREQUEST.fields_by_name['labels'].message_type = _WRITELOGENTRIESREQUEST_LABELSENTRY +_WRITELOGENTRIESREQUEST.fields_by_name['entries'].message_type = google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.fields_by_name['value'].message_type = google_dot_rpc_dot_status__pb2._STATUS +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.containing_type = _WRITELOGENTRIESPARTIALERRORS +_WRITELOGENTRIESPARTIALERRORS.fields_by_name['log_entry_errors'].message_type = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY +_LISTLOGENTRIESRESPONSE.fields_by_name['entries'].message_type = google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY 
+_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name['resource_descriptors'].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR +DESCRIPTOR.message_types_by_name['DeleteLogRequest'] = _DELETELOGREQUEST +DESCRIPTOR.message_types_by_name['WriteLogEntriesRequest'] = _WRITELOGENTRIESREQUEST +DESCRIPTOR.message_types_by_name['WriteLogEntriesResponse'] = _WRITELOGENTRIESRESPONSE +DESCRIPTOR.message_types_by_name['WriteLogEntriesPartialErrors'] = _WRITELOGENTRIESPARTIALERRORS +DESCRIPTOR.message_types_by_name['ListLogEntriesRequest'] = _LISTLOGENTRIESREQUEST +DESCRIPTOR.message_types_by_name['ListLogEntriesResponse'] = _LISTLOGENTRIESRESPONSE +DESCRIPTOR.message_types_by_name['ListMonitoredResourceDescriptorsRequest'] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST +DESCRIPTOR.message_types_by_name['ListMonitoredResourceDescriptorsResponse'] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE +DESCRIPTOR.message_types_by_name['ListLogsRequest'] = _LISTLOGSREQUEST +DESCRIPTOR.message_types_by_name['ListLogsResponse'] = _LISTLOGSRESPONSE + +DeleteLogRequest = _reflection.GeneratedProtocolMessageType('DeleteLogRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETELOGREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to DeleteLog. + + + Attributes: + log_name: + Required. The resource name of the log to delete: :: + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- + encoded. For example, ``"projects/my-project- + id/logs/syslog"``, ``"organizations/1234567890/logs/cloudresou + rcemanager.googleapis.com%2Factivity"``. For more information + about log names, see [LogEntry][google.logging.v2.LogEntry]. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) + )) +_sym_db.RegisterMessage(DeleteLogRequest) + +WriteLogEntriesRequest = _reflection.GeneratedProtocolMessageType('WriteLogEntriesRequest', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _WRITELOGENTRIESREQUEST_LABELSENTRY, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest.LabelsEntry) + )) + , + DESCRIPTOR = _WRITELOGENTRIESREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to WriteLogEntries. + + + Attributes: + log_name: + Optional. A default log resource name that is assigned to all + log entries in ``entries`` that do not specify a value for + ``log_name``: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- + encoded. For example, ``"projects/my-project-id/logs/syslog"`` + or ``"organizations/1234567890/logs/cloudresourcemanager.googl + eapis.com%2Factivity"``. For more information about log names, + see [LogEntry][google.logging.v2.LogEntry]. + resource: + Optional. A default monitored resource object that is assigned + to all log entries in ``entries`` that do not specify a value + for ``resource``. Example: :: { "type": "gce_instance", + "labels": { "zone": "us-central1-a", "instance_id": + "00000000000000000000" }} See + [LogEntry][google.logging.v2.LogEntry]. + labels: + Optional. Default labels that are added to the ``labels`` + field of all log entries in ``entries``. If a log entry + already has a label with the same key as a label in this + parameter, then the log entry's label is not changed. See + [LogEntry][google.logging.v2.LogEntry]. + entries: + Required. 
The log entries to send to Stackdriver Logging. The + order of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, and + ``labels`` fields are copied into those log entries in this + list that do not include values for their corresponding + fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. If the + ``timestamp`` or ``insert_id`` fields are missing in log + entries, then this method supplies the current time or a + unique identifier, respectively. The supplied values are + chosen so that, among the log entries that did not supply + their own values, the entries earlier in the list will sort + before the entries later in the list. See the ``entries.list`` + method. Log entries with timestamps that are more than the + `logs retention period `__ in the past + or more than 24 hours in the future might be discarded. + Discarding does not return an error. To improve throughput + and to avoid exceeding the `quota limit `__ for calls to ``entries.write``, you should try to + include several log entries in this list, rather than calling + this method for each individual log entry. + partial_success: + Optional. Whether valid entries should be written even if some + other entries fail due to INVALID\_ARGUMENT or + PERMISSION\_DENIED errors. If any entry is not written, then + the response status is the error associated with one of the + failed entries and the response includes error details keyed + by the entries' zero-based index in the ``entries.write`` + method. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) + )) +_sym_db.RegisterMessage(WriteLogEntriesRequest) +_sym_db.RegisterMessage(WriteLogEntriesRequest.LabelsEntry) + +WriteLogEntriesResponse = _reflection.GeneratedProtocolMessageType('WriteLogEntriesResponse', (_message.Message,), dict( + DESCRIPTOR = _WRITELOGENTRIESRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Result returned from WriteLogEntries. empty + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) + )) +_sym_db.RegisterMessage(WriteLogEntriesResponse) + +WriteLogEntriesPartialErrors = _reflection.GeneratedProtocolMessageType('WriteLogEntriesPartialErrors', (_message.Message,), dict( + + LogEntryErrorsEntry = _reflection.GeneratedProtocolMessageType('LogEntryErrorsEntry', (_message.Message,), dict( + DESCRIPTOR = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry) + )) + , + DESCRIPTOR = _WRITELOGENTRIESPARTIALERRORS, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Error details for WriteLogEntries with partial success. + + + Attributes: + log_entry_errors: + When ``WriteLogEntriesRequest.partial_success`` is true, + records the error status for entries that were not written due + to a permanent error, keyed by the entry's zero-based index in + ``WriteLogEntriesRequest.entries``. Failed requests for which + no entries are written will not include per-entry errors. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors) + )) +_sym_db.RegisterMessage(WriteLogEntriesPartialErrors) +_sym_db.RegisterMessage(WriteLogEntriesPartialErrors.LogEntryErrorsEntry) + +ListLogEntriesRequest = _reflection.GeneratedProtocolMessageType('ListLogEntriesRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGENTRIESREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to ``ListLogEntries``. + + + Attributes: + project_ids: + Deprecated. Use ``resource_names`` instead. One or more + project identifiers or project numbers from which to retrieve + log entries. Example: ``"my-project-1A"``. If present, these + project identifiers are converted to resource name format and + added to the list of resources in ``resource_names``. + resource_names: + Required. Names of one or more parent resources from which to + retrieve log entries: :: "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" Projects listed in the ``project_ids`` + field are added to this list. + filter: + Optional. A filter that chooses which log entries to return. + See `Advanced Logs Filters + `__. Only log entries + that match the filter are returned. An empty filter matches + all log entries in the resources listed in ``resource_names``. + Referencing a parent resource that is not listed in + ``resource_names`` will cause the filter to return no results. + The maximum length of the filter is 20000 characters. + order_by: + Optional. How the results should be sorted. Presently, the + only permitted values are ``"timestamp asc"`` (default) and + ``"timestamp desc"``. The first option returns entries in + order of increasing values of ``LogEntry.timestamp`` (oldest + first), and the second option returns entries in order of + decreasing timestamps (newest first). 
Entries with equal + timestamps are returned in order of their ``insert_id`` + values. + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``next_page_token`` in the response indicates that more + results might be available. + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``page_token`` must be + the value of ``next_page_token`` from the previous response. + The values of other method parameters should be identical to + those in the previous call. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) + )) +_sym_db.RegisterMessage(ListLogEntriesRequest) + +ListLogEntriesResponse = _reflection.GeneratedProtocolMessageType('ListLogEntriesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGENTRIESRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Result returned from ``ListLogEntries``. + + + Attributes: + entries: + A list of log entries. If ``entries`` is empty, + ``nextPageToken`` may still be returned, indicating that more + entries may exist. See ``nextPageToken`` for more information. + next_page_token: + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the next + set of results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. If a value for + ``next_page_token`` appears and the ``entries`` field is + empty, it means that the search found no log entries so far + but it did not have time to search all the possible log + entries. Retry the method with this value for ``page_token`` + to continue the search. Alternatively, consider speeding up + the search by changing your filter to specify a single log + name or resource type, or to narrow the time range of the + search. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) + )) +_sym_db.RegisterMessage(ListLogEntriesResponse) + +ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType('ListMonitoredResourceDescriptorsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to ListMonitoredResourceDescriptors + + + Attributes: + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) + )) +_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsRequest) + +ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType('ListMonitoredResourceDescriptorsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Result returned from ListMonitoredResourceDescriptors. + + + Attributes: + resource_descriptors: + A list of resource descriptors. + next_page_token: + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the next + set of results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) + )) +_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsResponse) + +ListLogsRequest = _reflection.GeneratedProtocolMessageType('ListLogsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGSREQUEST, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """The parameters to ListLogs. + + + Attributes: + parent: + Required. The resource name that owns the logs: :: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more results + might be available. + page_token: + Optional. If present, then retrieve the next batch of results + from the preceding call to this method. ``pageToken`` must be + the value of ``nextPageToken`` from the previous response. The + values of other method parameters should be identical to those + in the previous call. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsRequest) + )) +_sym_db.RegisterMessage(ListLogsRequest) + +ListLogsResponse = _reflection.GeneratedProtocolMessageType('ListLogsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTLOGSRESPONSE, + __module__ = 'google.cloud.logging_v2.proto.logging_pb2' + , + __doc__ = """Result returned from ListLogs. + + + Attributes: + log_names: + A list of log names. For example, ``"projects/my- + project/syslog"`` or ``"organizations/123/cloudresourcemanager + .googleapis.com%2Factivity"``. + next_page_token: + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the next + set of results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. 
+ """, + # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsResponse) + )) +_sym_db.RegisterMessage(ListLogsResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2')) +_WRITELOGENTRIESREQUEST_LABELSENTRY.has_options = True +_WRITELOGENTRIESREQUEST_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.has_options = True +_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class LoggingServiceV2Stub(object): + """Service for ingesting and querying logs. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.DeleteLog = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=DeleteLogRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.WriteLogEntries = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=WriteLogEntriesRequest.SerializeToString, + response_deserializer=WriteLogEntriesResponse.FromString, + ) + self.ListLogEntries = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=ListLogEntriesRequest.SerializeToString, + response_deserializer=ListLogEntriesResponse.FromString, + ) + self.ListMonitoredResourceDescriptors = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=ListMonitoredResourceDescriptorsRequest.SerializeToString, + response_deserializer=ListMonitoredResourceDescriptorsResponse.FromString, + ) + self.ListLogs = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=ListLogsRequest.SerializeToString, + response_deserializer=ListLogsResponse.FromString, + ) + + + class LoggingServiceV2Servicer(object): + """Service for ingesting and querying logs. + """ + + def DeleteLog(self, request, context): + """Deletes all the log entries in a log. + The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def WriteLogEntries(self, request, context): + """## Log entry resources + + Writes log entries to Stackdriver Logging. This API method is the + only way to send log entries to Stackdriver Logging. 
This method + is used, directly or indirectly, by the Stackdriver Logging agent + (fluentd) and all logging libraries configured to use Stackdriver + Logging. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListLogEntries(self, request, context): + """Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + [Exporting Logs](/logging/docs/export). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListMonitoredResourceDescriptors(self, request, context): + """Lists the descriptors for monitored resource types used by Stackdriver + Logging. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListLogs(self, request, context): + """Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_LoggingServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'DeleteLog': grpc.unary_unary_rpc_method_handler( + servicer.DeleteLog, + request_deserializer=DeleteLogRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'WriteLogEntries': grpc.unary_unary_rpc_method_handler( + servicer.WriteLogEntries, + request_deserializer=WriteLogEntriesRequest.FromString, + response_serializer=WriteLogEntriesResponse.SerializeToString, + ), + 'ListLogEntries': grpc.unary_unary_rpc_method_handler( + servicer.ListLogEntries, + request_deserializer=ListLogEntriesRequest.FromString, + response_serializer=ListLogEntriesResponse.SerializeToString, + ), + 'ListMonitoredResourceDescriptors': grpc.unary_unary_rpc_method_handler( + servicer.ListMonitoredResourceDescriptors, + request_deserializer=ListMonitoredResourceDescriptorsRequest.FromString, + response_serializer=ListMonitoredResourceDescriptorsResponse.SerializeToString, + ), + 'ListLogs': grpc.unary_unary_rpc_method_handler( + servicer.ListLogs, + request_deserializer=ListLogsRequest.FromString, + response_serializer=ListLogsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.LoggingServiceV2', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaLoggingServiceV2Servicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for ingesting and querying logs. + """ + def DeleteLog(self, request, context): + """Deletes all the log entries in a log. 
+ The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def WriteLogEntries(self, request, context): + """## Log entry resources + + Writes log entries to Stackdriver Logging. This API method is the + only way to send log entries to Stackdriver Logging. This method + is used, directly or indirectly, by the Stackdriver Logging agent + (fluentd) and all logging libraries configured to use Stackdriver + Logging. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListLogEntries(self, request, context): + """Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + [Exporting Logs](/logging/docs/export). + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListMonitoredResourceDescriptors(self, request, context): + """Lists the descriptors for monitored resource types used by Stackdriver + Logging. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListLogs(self, request, context): + """Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaLoggingServiceV2Stub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service for ingesting and querying logs. + """ + def DeleteLog(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes all the log entries in a log. + The log reappears if it receives new entries. + Log entries written shortly before the delete operation might not be + deleted. 
+ """ + raise NotImplementedError() + DeleteLog.future = None + def WriteLogEntries(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """## Log entry resources + + Writes log entries to Stackdriver Logging. This API method is the + only way to send log entries to Stackdriver Logging. This method + is used, directly or indirectly, by the Stackdriver Logging agent + (fluentd) and all logging libraries configured to use Stackdriver + Logging. + """ + raise NotImplementedError() + WriteLogEntries.future = None + def ListLogEntries(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + [Exporting Logs](/logging/docs/export). + """ + raise NotImplementedError() + ListLogEntries.future = None + def ListMonitoredResourceDescriptors(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the descriptors for monitored resource types used by Stackdriver + Logging. + """ + raise NotImplementedError() + ListMonitoredResourceDescriptors.future = None + def ListLogs(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. + """ + raise NotImplementedError() + ListLogs.future = None + + + def beta_create_LoggingServiceV2_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): DeleteLogRequest.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesRequest.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsRequest.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsRequest.FromString, + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesRequest.FromString, + } + response_serializers = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesResponse.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsResponse.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsResponse.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesResponse.SerializeToString, + } + method_implementations = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): face_utilities.unary_unary_inline(servicer.DeleteLog), + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): face_utilities.unary_unary_inline(servicer.ListLogEntries), + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): face_utilities.unary_unary_inline(servicer.ListLogs), + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): face_utilities.unary_unary_inline(servicer.ListMonitoredResourceDescriptors), + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): face_utilities.unary_unary_inline(servicer.WriteLogEntries), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, 
thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_LoggingServiceV2_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): DeleteLogRequest.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesRequest.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsRequest.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsRequest.SerializeToString, + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesRequest.SerializeToString, + } + response_deserializers = { + ('google.logging.v2.LoggingServiceV2', 'DeleteLog'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListLogEntries'): ListLogEntriesResponse.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListLogs'): ListLogsResponse.FromString, + ('google.logging.v2.LoggingServiceV2', 'ListMonitoredResourceDescriptors'): ListMonitoredResourceDescriptorsResponse.FromString, + ('google.logging.v2.LoggingServiceV2', 'WriteLogEntries'): WriteLogEntriesResponse.FromString, + } + cardinalities = { + 'DeleteLog': cardinality.Cardinality.UNARY_UNARY, + 'ListLogEntries': cardinality.Cardinality.UNARY_UNARY, + 'ListLogs': cardinality.Cardinality.UNARY_UNARY, + 'ListMonitoredResourceDescriptors': cardinality.Cardinality.UNARY_UNARY, + 'WriteLogEntries': cardinality.Cardinality.UNARY_UNARY, + } 
+ stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.logging.v2.LoggingServiceV2', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py new file mode 100644 index 000000000000..fc16e12e140f --- /dev/null +++ b/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -0,0 +1,128 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.logging_v2.proto.logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class LoggingServiceV2Stub(object): + """Service for ingesting and querying logs. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.DeleteLog = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.WriteLogEntries = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString, + ) + self.ListLogEntries = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString, + ) + self.ListMonitoredResourceDescriptors = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString, + ) + self.ListLogs = channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString, + ) + + +class LoggingServiceV2Servicer(object): + """Service for ingesting and querying logs. + """ + + def DeleteLog(self, request, context): + """Deletes all the log entries in a log. + The log reappears if it receives new entries. 
+ Log entries written shortly before the delete operation might not be + deleted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def WriteLogEntries(self, request, context): + """## Log entry resources + + Writes log entries to Stackdriver Logging. This API method is the + only way to send log entries to Stackdriver Logging. This method + is used, directly or indirectly, by the Stackdriver Logging agent + (fluentd) and all logging libraries configured to use Stackdriver + Logging. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListLogEntries(self, request, context): + """Lists log entries. Use this method to retrieve log entries from + Stackdriver Logging. For ways to export log entries, see + [Exporting Logs](/logging/docs/export). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListMonitoredResourceDescriptors(self, request, context): + """Lists the descriptors for monitored resource types used by Stackdriver + Logging. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListLogs(self, request, context): + """Lists the logs in projects, organizations, folders, or billing accounts. + Only logs that have entries are listed. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_LoggingServiceV2Servicer_to_server(servicer, server): + rpc_method_handlers = { + 'DeleteLog': grpc.unary_unary_rpc_method_handler( + servicer.DeleteLog, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'WriteLogEntries': grpc.unary_unary_rpc_method_handler( + servicer.WriteLogEntries, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString, + ), + 'ListLogEntries': grpc.unary_unary_rpc_method_handler( + servicer.ListLogEntries, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString, + ), + 'ListMonitoredResourceDescriptors': grpc.unary_unary_rpc_method_handler( + servicer.ListMonitoredResourceDescriptors, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, + ), + 'ListLogs': grpc.unary_unary_rpc_method_handler( + servicer.ListLogs, + request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString, + response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.logging.v2.LoggingServiceV2', 
rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/logging/google/cloud/logging_v2/types.py b/logging/google/cloud/logging_v2/types.py new file mode 100644 index 000000000000..d440d8f58f07 --- /dev/null +++ b/logging/google/cloud/logging_v2/types.py @@ -0,0 +1,65 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.api_core.protobuf_helpers import get_messages + +from google.api import distribution_pb2 +from google.api import http_pb2 +from google.api import label_pb2 +from google.api import metric_pb2 +from google.api import monitored_resource_pb2 +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.logging.type import http_request_pb2 +from google.protobuf import any_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import struct_pb2 +from google.protobuf import timestamp_pb2 +from google.rpc import status_pb2 + +names = [] +for module in ( + distribution_pb2, + http_pb2, + label_pb2, + metric_pb2, + monitored_resource_pb2, + log_entry_pb2, + logging_config_pb2, + logging_metrics_pb2, + logging_pb2, 
+ http_request_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + struct_pb2, + timestamp_pb2, + status_pb2, +): + for name, message in get_messages(module).items(): + message.__module__ = 'google.cloud.logging_v2.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + +__all__ = tuple(sorted(names)) diff --git a/logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py b/logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py new file mode 100644 index 000000000000..ad99b1081cc5 --- /dev/null +++ b/logging/tests/system/gapic/v2/test_system_logging_service_v2_v2.py @@ -0,0 +1,34 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import time + +from google.api import monitored_resource_pb2 +from google.cloud import logging_v2 +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_pb2 + + +class TestSystemLoggingServiceV2(object): + def test_write_log_entries(self): + project_id = os.environ['PROJECT_ID'] + + client = logging_v2.LoggingServiceV2Client() + log_name = client.log_path(project_id, 'test-{0}'.format(time.time())) + resource = {} + labels = {} + entries = [] + response = client.write_log_entries( + entries, log_name=log_name, resource=resource, labels=labels) diff --git a/logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py b/logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py new file mode 100644 index 000000000000..a6893944cff4 --- /dev/null +++ b/logging/tests/unit/gapic/v2/test_config_service_v2_client_v2.py @@ -0,0 +1,460 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Unit tests."""
+
+import pytest
+
+from google.cloud import logging_v2
+from google.cloud.logging_v2.proto import logging_config_pb2
+from google.protobuf import empty_pb2
+from google.protobuf import field_mask_pb2
+
+
+class MultiCallableStub(object):
+    """Stub for the grpc.UnaryUnaryMultiCallable interface."""
+
+    def __init__(self, method, channel_stub):
+        self.method = method
+        self.channel_stub = channel_stub
+
+    def __call__(self, request, timeout=None, metadata=None, credentials=None):
+        self.channel_stub.requests.append((self.method, request))
+
+        response = None
+        if self.channel_stub.responses:
+            response = self.channel_stub.responses.pop()
+
+        if isinstance(response, Exception):
+            raise response
+
+        if response:
+            return response
+
+
+class ChannelStub(object):
+    """Stub for the grpc.Channel interface; records requests, pops canned responses."""
+
+    def __init__(self, responses=None):  # was 'responses=[]': shared mutable default (B006)
+        self.responses = responses if responses is not None else []
+        self.requests = []
+
+    def unary_unary(self,
+                    method,
+                    request_serializer=None,
+                    response_deserializer=None):
+        return MultiCallableStub(method, self)
+
+
+class CustomException(Exception):
+    pass
+
+
+class TestConfigServiceV2Client(object):
+    def test_list_sinks(self):
+        # Setup Expected Response
+        next_page_token = ''
+        sinks_element = {}
+        sinks = [sinks_element]
+        expected_response = {
+            'next_page_token': next_page_token,
+            'sinks': sinks
+        }
+        expected_response = logging_config_pb2.ListSinksResponse(
+            **expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        parent = client.project_path('[PROJECT]')
+
+        paged_list_response = client.list_sinks(parent)
+        resources = list(paged_list_response)
+        assert len(resources) == 1
+
+        assert expected_response.sinks[0] == resources[0]
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.ListSinksRequest(parent=parent)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_list_sinks_exception(self):
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        parent = client.project_path('[PROJECT]')
+
+        paged_list_response = client.list_sinks(parent)
+        with pytest.raises(CustomException):
+            list(paged_list_response)
+
+    def test_get_sink(self):
+        # Setup Expected Response
+        name = 'name3373707'
+        destination = 'destination-1429847026'
+        filter_ = 'filter-1274492040'
+        writer_identity = 'writerIdentity775638794'
+        include_children = True
+        expected_response = {
+            'name': name,
+            'destination': destination,
+            'filter': filter_,
+            'writer_identity': writer_identity,
+            'include_children': include_children
+        }
+        expected_response = logging_config_pb2.LogSink(**expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+
+        response = client.get_sink(sink_name)
+        assert expected_response == response
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.GetSinkRequest(
+            sink_name=sink_name)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_get_sink_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+
+        with pytest.raises(CustomException):
+            client.get_sink(sink_name)
+
+    def test_create_sink(self):
+        # Setup Expected Response
+        name = 'name3373707'
+        destination = 'destination-1429847026'
+        filter_ = 'filter-1274492040'
+        writer_identity = 'writerIdentity775638794'
+        include_children = True
+        expected_response = {
+            'name': name,
+            'destination': destination,
+            'filter': filter_,
+            'writer_identity': writer_identity,
+            'include_children': include_children
+        }
+        expected_response = logging_config_pb2.LogSink(**expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        parent = client.project_path('[PROJECT]')
+        sink = {}
+
+        response = client.create_sink(parent, sink)
+        assert expected_response == response
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.CreateSinkRequest(
+            parent=parent, sink=sink)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_create_sink_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        parent = client.project_path('[PROJECT]')
+        sink = {}
+
+        with pytest.raises(CustomException):
+            client.create_sink(parent, sink)
+
+    def test_update_sink(self):
+        # Setup Expected Response
+        name = 'name3373707'
+        destination = 'destination-1429847026'
+        filter_ = 'filter-1274492040'
+        writer_identity = 'writerIdentity775638794'
+        include_children = True
+        expected_response = {
+            'name': name,
+            'destination': destination,
+            'filter': filter_,
+            'writer_identity': writer_identity,
+            'include_children': include_children
+        }
+        expected_response = logging_config_pb2.LogSink(**expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+        sink = {}
+
+        response = client.update_sink(sink_name, sink)
+        assert expected_response == response
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.UpdateSinkRequest(
+            sink_name=sink_name, sink=sink)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_update_sink_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+        sink = {}
+
+        with pytest.raises(CustomException):
+            client.update_sink(sink_name, sink)
+
+    def test_delete_sink(self):
+        channel = ChannelStub()
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+
+        client.delete_sink(sink_name)
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.DeleteSinkRequest(
+            sink_name=sink_name)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_delete_sink_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        sink_name = client.sink_path('[PROJECT]', '[SINK]')
+
+        with pytest.raises(CustomException):
+            client.delete_sink(sink_name)
+
+    def test_list_exclusions(self):
+        # Setup Expected Response
+        next_page_token = ''
+        exclusions_element = {}
+        exclusions = [exclusions_element]
+        expected_response = {
+            'next_page_token': next_page_token,
+            'exclusions': exclusions
+        }
+        expected_response = logging_config_pb2.ListExclusionsResponse(
+            **expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        parent = client.project_path('[PROJECT]')
+
+        paged_list_response = client.list_exclusions(parent)
+        resources = list(paged_list_response)
+        assert len(resources) == 1
+
+        assert expected_response.exclusions[0] == resources[0]
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.ListExclusionsRequest(
+            parent=parent)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_list_exclusions_exception(self):
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        parent = client.project_path('[PROJECT]')
+
+        paged_list_response = client.list_exclusions(parent)
+        with pytest.raises(CustomException):
+            list(paged_list_response)
+
+    def test_get_exclusion(self):
+        # Setup Expected Response
+        name_2 = 'name2-1052831874'
+        description = 'description-1724546052'
+        filter_ = 'filter-1274492040'
+        disabled = True
+        expected_response = {
+            'name': name_2,
+            'description': description,
+            'filter': filter_,
+            'disabled': disabled
+        }
+        expected_response = logging_config_pb2.LogExclusion(
+            **expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+
+        response = client.get_exclusion(name)
+        assert expected_response == response
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.GetExclusionRequest(name=name)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_get_exclusion_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+
+        with pytest.raises(CustomException):
+            client.get_exclusion(name)
+
+    def test_create_exclusion(self):
+        # Setup Expected Response
+        name = 'name3373707'
+        description = 'description-1724546052'
+        filter_ = 'filter-1274492040'
+        disabled = True
+        expected_response = {
+            'name': name,
+            'description': description,
+            'filter': filter_,
+            'disabled': disabled
+        }
+        expected_response = logging_config_pb2.LogExclusion(
+            **expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        parent = client.project_path('[PROJECT]')
+        exclusion = {}
+
+        response = client.create_exclusion(parent, exclusion)
+        assert expected_response == response
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.CreateExclusionRequest(
+            parent=parent, exclusion=exclusion)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_create_exclusion_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        parent = client.project_path('[PROJECT]')
+        exclusion = {}
+
+        with pytest.raises(CustomException):
+            client.create_exclusion(parent, exclusion)
+
+    def test_update_exclusion(self):
+        # Setup Expected Response
+        name_2 = 'name2-1052831874'
+        description = 'description-1724546052'
+        filter_ = 'filter-1274492040'
+        disabled = True
+        expected_response = {
+            'name': name_2,
+            'description': description,
+            'filter': filter_,
+            'disabled': disabled
+        }
+        expected_response = logging_config_pb2.LogExclusion(
+            **expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+        exclusion = {}
+        update_mask = {}
+
+        response = client.update_exclusion(name, exclusion, update_mask)
+        assert expected_response == response
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.UpdateExclusionRequest(
+            name=name, exclusion=exclusion, update_mask=update_mask)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_update_exclusion_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+        exclusion = {}
+        update_mask = {}
+
+        with pytest.raises(CustomException):
+            client.update_exclusion(name, exclusion, update_mask)
+
+    def test_delete_exclusion(self):
+        channel = ChannelStub()
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup Request
+        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+
+        client.delete_exclusion(name)
+
+        assert len(channel.requests) == 1
+        expected_request = logging_config_pb2.DeleteExclusionRequest(name=name)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_delete_exclusion_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])
+        client = logging_v2.ConfigServiceV2Client(channel=channel)
+
+        # Setup request
+        name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
+
+        with pytest.raises(CustomException):
+            client.delete_exclusion(name)
diff --git a/logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py b/logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py
new file mode 100644
index 000000000000..7d942ec9e67f
--- /dev/null
+++ b/logging/tests/unit/gapic/v2/test_logging_service_v2_client_v2.py
@@ -0,0 +1,238 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Unit tests.""" + +import pytest + +from google.api import monitored_resource_pb2 +from google.cloud import logging_v2 +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.protobuf import empty_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestLoggingServiceV2Client(object): + def test_delete_log(self): + channel = ChannelStub() + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup Request + log_name = client.log_path('[PROJECT]', '[LOG]') + + client.delete_log(log_name) + + assert len(channel.requests) == 1 + expected_request = logging_pb2.DeleteLogRequest(log_name=log_name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_log_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup request + log_name = client.log_path('[PROJECT]', '[LOG]') + + with pytest.raises(CustomException): + client.delete_log(log_name) + + def test_write_log_entries(self): + # Setup Expected 
Response + expected_response = {} + expected_response = logging_pb2.WriteLogEntriesResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup Request + entries = [] + + response = client.write_log_entries(entries) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_pb2.WriteLogEntriesRequest(entries=entries) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_write_log_entries_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup request + entries = [] + + with pytest.raises(CustomException): + client.write_log_entries(entries) + + def test_list_log_entries(self): + # Setup Expected Response + next_page_token = '' + entries_element = {} + entries = [entries_element] + expected_response = { + 'next_page_token': next_page_token, + 'entries': entries + } + expected_response = logging_pb2.ListLogEntriesResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup Request + resource_names = [] + + paged_list_response = client.list_log_entries(resource_names) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.entries[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_pb2.ListLogEntriesRequest( + resource_names=resource_names) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_log_entries_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup request + resource_names = [] + + 
paged_list_response = client.list_log_entries(resource_names) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_list_monitored_resource_descriptors(self): + # Setup Expected Response + next_page_token = '' + resource_descriptors_element = {} + resource_descriptors = [resource_descriptors_element] + expected_response = { + 'next_page_token': next_page_token, + 'resource_descriptors': resource_descriptors + } + expected_response = logging_pb2.ListMonitoredResourceDescriptorsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + paged_list_response = client.list_monitored_resource_descriptors() + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.resource_descriptors[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_pb2.ListMonitoredResourceDescriptorsRequest( + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_monitored_resource_descriptors_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + paged_list_response = client.list_monitored_resource_descriptors() + with pytest.raises(CustomException): + list(paged_list_response) + + def test_list_logs(self): + # Setup Expected Response + next_page_token = '' + log_names_element = 'logNamesElement-1079688374' + log_names = [log_names_element] + expected_response = { + 'next_page_token': next_page_token, + 'log_names': log_names + } + expected_response = logging_pb2.ListLogsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + + paged_list_response = 
client.list_logs(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.log_names[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_pb2.ListLogsRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_logs_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.LoggingServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_logs(parent) + with pytest.raises(CustomException): + list(paged_list_response) diff --git a/logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py b/logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py new file mode 100644 index 000000000000..2efc90c24a1d --- /dev/null +++ b/logging/tests/unit/gapic/v2/test_metrics_service_v2_client_v2.py @@ -0,0 +1,256 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Unit tests.""" + +import pytest + +from google.cloud import logging_v2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 +from google.protobuf import empty_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestMetricsServiceV2Client(object): + def test_list_log_metrics(self): + # Setup Expected Response + next_page_token = '' + metrics_element = {} + metrics = [metrics_element] + expected_response = { + 'next_page_token': next_page_token, + 'metrics': metrics + } + expected_response = logging_metrics_pb2.ListLogMetricsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_log_metrics(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.metrics[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = logging_metrics_pb2.ListLogMetricsRequest( + parent=parent) + actual_request = channel.requests[0][1] + assert expected_request 
== actual_request + + def test_list_log_metrics_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + + paged_list_response = client.list_log_metrics(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_log_metric(self): + # Setup Expected Response + name = 'name3373707' + description = 'description-1724546052' + filter_ = 'filter-1274492040' + value_extractor = 'valueExtractor2047672534' + expected_response = { + 'name': name, + 'description': description, + 'filter': filter_, + 'value_extractor': value_extractor + } + expected_response = logging_metrics_pb2.LogMetric(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + + response = client.get_log_metric(metric_name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_metrics_pb2.GetLogMetricRequest( + metric_name=metric_name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_log_metric_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + + with pytest.raises(CustomException): + client.get_log_metric(metric_name) + + def test_create_log_metric(self): + # Setup Expected Response + name = 'name3373707' + description = 'description-1724546052' + filter_ = 'filter-1274492040' + value_extractor = 'valueExtractor2047672534' + expected_response = { + 'name': name, + 'description': description, + 'filter': filter_, + 'value_extractor': value_extractor + } + 
expected_response = logging_metrics_pb2.LogMetric(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + parent = client.project_path('[PROJECT]') + metric = {} + + response = client.create_log_metric(parent, metric) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_metrics_pb2.CreateLogMetricRequest( + parent=parent, metric=metric) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_log_metric_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + parent = client.project_path('[PROJECT]') + metric = {} + + with pytest.raises(CustomException): + client.create_log_metric(parent, metric) + + def test_update_log_metric(self): + # Setup Expected Response + name = 'name3373707' + description = 'description-1724546052' + filter_ = 'filter-1274492040' + value_extractor = 'valueExtractor2047672534' + expected_response = { + 'name': name, + 'description': description, + 'filter': filter_, + 'value_extractor': value_extractor + } + expected_response = logging_metrics_pb2.LogMetric(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + metric = {} + + response = client.update_log_metric(metric_name, metric) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = logging_metrics_pb2.UpdateLogMetricRequest( + metric_name=metric_name, metric=metric) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_log_metric_exception(self): + # 
Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + metric = {} + + with pytest.raises(CustomException): + client.update_log_metric(metric_name, metric) + + def test_delete_log_metric(self): + channel = ChannelStub() + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup Request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + + client.delete_log_metric(metric_name) + + assert len(channel.requests) == 1 + expected_request = logging_metrics_pb2.DeleteLogMetricRequest( + metric_name=metric_name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_log_metric_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = logging_v2.MetricsServiceV2Client(channel=channel) + + # Setup request + metric_name = client.metric_path('[PROJECT]', '[METRIC]') + + with pytest.raises(CustomException): + client.delete_log_metric(metric_name) From c0ddd73a3ba316dd8856da0135f342a31712eaec Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 17 Jan 2018 15:59:24 -0800 Subject: [PATCH 2/5] Removing all references to gax --- .../cloud/logging/{_gax.py => _gapic.py} | 223 +-- logging/google/cloud/logging/client.py | 16 +- logging/nox.py | 3 +- logging/setup.py | 5 +- .../{system.py => system/test_system.py} | 19 +- logging/tests/unit/test__gapic.py | 650 +++++++ logging/tests/unit/test__gax.py | 1614 ----------------- logging/tests/unit/test_client.py | 35 +- 8 files changed, 752 insertions(+), 1813 deletions(-) rename logging/google/cloud/logging/{_gax.py => _gapic.py} (74%) rename logging/tests/{system.py => system/test_system.py} (97%) create mode 100644 logging/tests/unit/test__gapic.py delete mode 100644 logging/tests/unit/test__gax.py diff --git 
a/logging/google/cloud/logging/_gax.py b/logging/google/cloud/logging/_gapic.py similarity index 74% rename from logging/google/cloud/logging/_gax.py rename to logging/google/cloud/logging/_gapic.py index 3f3624b47cc4..a292721111eb 100644 --- a/logging/google/cloud/logging/_gax.py +++ b/logging/google/cloud/logging/_gapic.py @@ -12,50 +12,46 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""GAX wrapper for Logging API requests.""" +"""Wrapper for adapting the autogenerated gapic client to the hand-written +client.""" import functools -from google.api_core import page_iterator -from google.cloud.gapic.logging.v2.config_service_v2_client import ( +from google.api_core.gapic_v1 import client_info +from google.cloud.logging_v2.gapic.config_service_v2_client import ( ConfigServiceV2Client) -from google.cloud.gapic.logging.v2.logging_service_v2_client import ( +from google.cloud.logging_v2.gapic.logging_service_v2_client import ( LoggingServiceV2Client) -from google.cloud.gapic.logging.v2.metrics_service_v2_client import ( +from google.cloud.logging_v2.gapic.metrics_service_v2_client import ( MetricsServiceV2Client) -from google.gax import CallOptions -from google.gax import INITIAL_PAGE -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink -from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric -from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry +from google.cloud.logging_v2.proto.logging_config_pb2 import LogSink +from google.cloud.logging_v2.proto.logging_metrics_pb2 import LogMetric +from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import ParseDict -from grpc import StatusCode -from google.cloud._helpers import make_secure_channel -from google.cloud._http import DEFAULT_USER_AGENT 
-from google.cloud.exceptions import Conflict -from google.cloud.exceptions import NotFound from google.cloud.logging import __version__ from google.cloud.logging._helpers import entry_from_resource from google.cloud.logging.sink import Sink from google.cloud.logging.metric import Metric +_CLIENT_INFO = client_info.ClientInfo( + client_library_version=__version__) + + class _LoggingAPI(object): """Helper mapping logging-related APIs. - :type gax_api: + :type gapic_api: :class:`.logging_service_v2_client.LoggingServiceV2Client` - :param gax_api: API object used to make GAX requests. + :param gapic_api: API object used to make RPCs. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns this API object. """ - def __init__(self, gax_api, client): - self._gax_api = gax_api + def __init__(self, gapic_api, client): + self._gapic_api = gapic_api self._client = client def list_entries(self, projects, filter_='', order_by='', @@ -88,21 +84,19 @@ def list_entries(self, projects, filter_='', order_by='', :returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry` accessible to the current API. """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - page_iter = self._gax_api.list_log_entries( + page_iter = self._gapic_api.list_log_entries( [], project_ids=projects, filter_=filter_, order_by=order_by, - page_size=page_size, options=options) + page_size=page_size) + page_iter.client = self._client + page_iter.next_page_token = page_token # We attach a mutable loggers dictionary so that as Logger # objects are created by entry_from_resource, they can be # re-used by other log entries from the same logger. 
loggers = {} - item_to_value = functools.partial( + page_iter._item_to_value = functools.partial( _item_to_entry, loggers=loggers) - return page_iterator._GAXIterator( - self._client, page_iter, item_to_value) + return page_iter def write_entries(self, entries, logger_name=None, resource=None, labels=None): @@ -123,12 +117,12 @@ def write_entries(self, entries, logger_name=None, resource=None, :param labels: default labels to associate with entries; individual entries may override. """ - options = None partial_success = False entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] - self._gax_api.write_log_entries( + print(entry_pbs) + self._gapic_api.write_log_entries( entry_pbs, log_name=logger_name, resource=resource, labels=labels, - partial_success=partial_success, options=options) + partial_success=partial_success) def logger_delete(self, project, logger_name): """API call: delete all entries in a logger via a DELETE request @@ -139,28 +133,22 @@ def logger_delete(self, project, logger_name): :type logger_name: str :param logger_name: name of logger containing the log entries to delete """ - options = None path = 'projects/%s/logs/%s' % (project, logger_name) - try: - self._gax_api.delete_log(path, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + self._gapic_api.delete_log(path) class _SinksAPI(object): """Helper mapping sink-related APIs. - :type gax_api: + :type gapic_api: :class:`.config_service_v2_client.ConfigServiceV2Client` - :param gax_api: API object used to make GAX requests. + :param gapic_api: API object used to make RPCs. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns this API object. 
""" - def __init__(self, gax_api, client): - self._gax_api = gax_api + def __init__(self, gapic_api, client): + self._gapic_api = gapic_api self._client = client def list_sinks(self, project, page_size=0, page_token=None): @@ -183,14 +171,13 @@ def list_sinks(self, project, page_size=0, page_token=None): if not None, indicates that more sinks can be retrieved with another call (pass that value as ``page_token``). """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_sinks(path, page_size=page_size, - options=options) - return page_iterator._GAXIterator( - self._client, page_iter, _item_to_sink) + page_iter = self._gapic_api.list_sinks( + path, page_size=page_size) + page_iter.client = self._client + page_iter.next_page_token = page_token + page_iter._item_to_value = _item_to_sink + return page_iter def sink_create(self, project, sink_name, filter_, destination, unique_writer_identity=False): @@ -222,22 +209,14 @@ def sink_create(self, project, sink_name, filter_, destination, :returns: The sink resource returned from the API (converted from a protobuf to a dictionary). 
""" - options = None parent = 'projects/%s' % (project,) - sink_pb = LogSink(name=sink_name, filter=filter_, - destination=destination) - try: - created_pb = self._gax_api.create_sink( - parent, - sink_pb, - unique_writer_identity=unique_writer_identity, - options=options, - ) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - path = 'projects/%s/sinks/%s' % (project, sink_name) - raise Conflict(path) - raise + sink_pb = LogSink( + name=sink_name, filter=filter_, destination=destination) + created_pb = self._gapic_api.create_sink( + parent, + sink_pb, + unique_writer_identity=unique_writer_identity + ) return MessageToDict(created_pb) def sink_get(self, project, sink_name): @@ -253,14 +232,8 @@ def sink_get(self, project, sink_name): :returns: The sink object returned from the API (converted from a protobuf to a dictionary). """ - options = None path = 'projects/%s/sinks/%s' % (project, sink_name) - try: - sink_pb = self._gax_api.get_sink(path, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + sink_pb = self._gapic_api.get_sink(path) # NOTE: LogSink message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. return MessageToDict(sink_pb) @@ -292,19 +265,12 @@ def sink_update(self, project, sink_name, filter_, destination, :returns: The sink resource returned from the API (converted from a protobuf to a dictionary). 
""" - options = None path = 'projects/%s/sinks/%s' % (project, sink_name) sink_pb = LogSink(name=path, filter=filter_, destination=destination) - try: - sink_pb = self._gax_api.update_sink( - path, - sink_pb, - unique_writer_identity=unique_writer_identity, - options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + sink_pb = self._gapic_api.update_sink( + path, + sink_pb, + unique_writer_identity=unique_writer_identity) # NOTE: LogSink message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. return MessageToDict(sink_pb) @@ -318,29 +284,23 @@ def sink_delete(self, project, sink_name): :type sink_name: str :param sink_name: the name of the sink """ - options = None path = 'projects/%s/sinks/%s' % (project, sink_name) - try: - self._gax_api.delete_sink(path, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + self._gapic_api.delete_sink(path) class _MetricsAPI(object): """Helper mapping sink-related APIs. - :type gax_api: + :type gapic_api: :class:`.metrics_service_v2_client.MetricsServiceV2Client` - :param gax_api: API object used to make GAX requests. + :param gapic_api: API object used to make RPCs. :type client: :class:`~google.cloud.logging.client.Client` :param client: The client that owns this API object. """ - def __init__(self, gax_api, client): - self._gax_api = gax_api + def __init__(self, gapic_api, client): + self._gapic_api = gapic_api self._client = client def list_metrics(self, project, page_size=0, page_token=None): @@ -363,14 +323,13 @@ def list_metrics(self, project, page_size=0, page_token=None): :class:`~google.cloud.logging.metric.Metric` accessible to the current API. 
""" - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_log_metrics( - path, page_size=page_size, options=options) - return page_iterator._GAXIterator( - self._client, page_iter, _item_to_metric) + page_iter = self._gapic_api.list_log_metrics( + path, page_size=page_size) + page_iter.client = self._client + page_iter.next_page_token = page_token + page_iter._item_to_value = _item_to_metric + return page_iter def metric_create(self, project, metric_name, filter_, description): """API call: create a metric resource. @@ -391,17 +350,10 @@ def metric_create(self, project, metric_name, filter_, description): :type description: str :param description: description of the metric. """ - options = None parent = 'projects/%s' % (project,) metric_pb = LogMetric(name=metric_name, filter=filter_, description=description) - try: - self._gax_api.create_log_metric(parent, metric_pb, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - path = 'projects/%s/metrics/%s' % (project, metric_name) - raise Conflict(path) - raise + self._gapic_api.create_log_metric(parent, metric_pb) def metric_get(self, project, metric_name): """API call: retrieve a metric resource. @@ -416,14 +368,8 @@ def metric_get(self, project, metric_name): :returns: The metric object returned from the API (converted from a protobuf to a dictionary). """ - options = None path = 'projects/%s/metrics/%s' % (project, metric_name) - try: - metric_pb = self._gax_api.get_log_metric(path, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(path) - raise + metric_pb = self._gapic_api.get_log_metric(path) # NOTE: LogMetric message type does not have an ``Any`` field # so `MessageToDict`` can safely be used. 
def make_logging_api(client):
    """Create an instance of the Logging API adapter.

    :type client: :class:`~google.cloud.logging.client.Client`
    :param client: The client that holds configuration details.

    :rtype: :class:`_LoggingAPI`
    :returns: A logging API instance with the proper credentials.
    """
    generated = LoggingServiceV2Client(
        credentials=client._credentials, client_info=_CLIENT_INFO)
    return _LoggingAPI(generated, client)


def make_metrics_api(client):
    """Create an instance of the Metrics API adapter.

    :type client: :class:`~google.cloud.logging.client.Client`
    :param client: The client that holds configuration details.

    :rtype: :class:`_MetricsAPI`
    :returns: A metrics API instance with the proper credentials.
    """
    generated = MetricsServiceV2Client(
        credentials=client._credentials, client_info=_CLIENT_INFO)
    return _MetricsAPI(generated, client)


def make_sinks_api(client):
    """Create an instance of the Sinks API adapter.

    :type client: :class:`~google.cloud.logging.client.Client`
    :param client: The client that holds configuration details.

    :rtype: :class:`_SinksAPI`
    :returns: A sinks API instance with the proper credentials.
    """
    generated = ConfigServiceV2Client(
        credentials=client._credentials, client_info=_CLIENT_INFO)
    return _SinksAPI(generated, client)
If unset, falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment variable This parameter should be considered private, and could @@ -122,7 +118,7 @@ def logging_api(self): """ if self._logging_api is None: if self._use_grpc: - self._logging_api = make_gax_logging_api(self) + self._logging_api = _gapic.make_logging_api(self) else: self._logging_api = JSONLoggingAPI(self) return self._logging_api @@ -136,7 +132,7 @@ def sinks_api(self): """ if self._sinks_api is None: if self._use_grpc: - self._sinks_api = make_gax_sinks_api(self) + self._sinks_api = _gapic.make_sinks_api(self) else: self._sinks_api = JSONSinksAPI(self) return self._sinks_api @@ -150,7 +146,7 @@ def metrics_api(self): """ if self._metrics_api is None: if self._use_grpc: - self._metrics_api = make_gax_metrics_api(self) + self._metrics_api = _gapic.make_metrics_api(self) else: self._metrics_api = JSONMetricsAPI(self) return self._metrics_api diff --git a/logging/nox.py b/logging/nox.py index c25cc45e4b2d..fdf48fedafa4 100644 --- a/logging/nox.py +++ b/logging/nox.py @@ -112,8 +112,9 @@ def system(session, py): 'py.test', '-vvv', '-s', - 'tests/system.py', + 'tests/system', *session.posargs, + # Currently allowed to fail due to very high flakiness. 
success_codes=range(0, 100) ) diff --git a/logging/setup.py b/logging/setup.py index 5ecaed3aff74..6eb782259fed 100644 --- a/logging/setup.py +++ b/logging/setup.py @@ -51,9 +51,8 @@ REQUIREMENTS = [ - 'google-cloud-core[grpc] >= 0.28.0, < 0.29dev', - 'google-api-core >= 0.1.1, < 0.2.0dev', - 'gapic-google-cloud-logging-v2 >= 0.91.0, < 0.92dev', + 'google-cloud-core >= 0.28.0, < 0.29dev', + 'google-api-core[grpc] >= 0.1.1, < 0.2.0dev', ] setup( diff --git a/logging/tests/system.py b/logging/tests/system/test_system.py similarity index 97% rename from logging/tests/system.py rename to logging/tests/system/test_system.py index 3449438600cc..ffa4615612d9 100644 --- a/logging/tests/system.py +++ b/logging/tests/system/test_system.py @@ -16,14 +16,11 @@ import logging import unittest -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from grpc import StatusCode - from google.cloud._helpers import UTC from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.exceptions import TooManyRequests +from google.cloud.exceptions import ServiceUnavailable import google.cloud.logging import google.cloud.logging.handlers.handlers from google.cloud.logging.handlers.handlers import CloudLoggingHandler @@ -41,18 +38,6 @@ retry_429 = RetryErrors(TooManyRequests) -def _retry_on_unavailable(exc): - """Retry only errors whose status code is 'UNAVAILABLE'. - - :type exc: :class:`~google.gax.errors.GaxError` - :param exc: The exception that was caught. - - :rtype: bool - :returns: Boolean indicating if the exception was UNAVAILABLE. - """ - return exc_to_code(exc) == StatusCode.UNAVAILABLE - - def _consume_entries(logger): """Consume all log entries from logger iterator. @@ -78,7 +63,7 @@ def _list_entries(logger): :returns: List of all entries consumed. 
""" inner = RetryResult(_has_entries)(_consume_entries) - outer = RetryErrors(GaxError, _retry_on_unavailable)(inner) + outer = RetryErrors(ServiceUnavailable)(inner) return outer(logger) diff --git a/logging/tests/unit/test__gapic.py b/logging/tests/unit/test__gapic.py new file mode 100644 index 000000000000..ffa7e7e6d7f0 --- /dev/null +++ b/logging/tests/unit/test__gapic.py @@ -0,0 +1,650 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +from google.api_core import grpc_helpers +import google.auth.credentials +from google.protobuf import empty_pb2 +import mock + +import google.cloud.logging +from google.cloud.logging import _gapic +from google.cloud.logging_v2.gapic import config_service_v2_client +from google.cloud.logging_v2.gapic import logging_service_v2_client +from google.cloud.logging_v2.gapic import metrics_service_v2_client +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 + + +PROJECT = 'PROJECT' +PROJECT_PATH = 'projects/%s' % (PROJECT,) +FILTER = 'logName:syslog AND severity>=ERROR' + + +def make_credentials(): + return mock.create_autospec( + google.auth.credentials.Credentials, instance=True) + + +class Test_LoggingAPI(object): + LOG_NAME = 'log_name' + LOG_PATH = 'projects/%s/logs/%s' % (PROJECT, LOG_NAME) + + 
@staticmethod + def make_logging_api(): + channel = grpc_helpers.ChannelStub() + gapic_client = logging_service_v2_client.LoggingServiceV2Client( + channel=channel) + handwritten_client = mock.Mock() + api = _gapic._LoggingAPI(gapic_client, handwritten_client) + return channel, api + + def test_ctor(self): + channel = grpc_helpers.ChannelStub() + gapic_client = logging_service_v2_client.LoggingServiceV2Client( + channel=channel) + api = _gapic._LoggingAPI(gapic_client, mock.sentinel.client) + assert api._gapic_api is gapic_client + assert api._client is mock.sentinel.client + + def test_list_entries(self): + channel, api = self.make_logging_api() + + log_entry_msg = log_entry_pb2.LogEntry( + log_name=self.LOG_PATH, + text_payload='text') + channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse( + entries=[log_entry_msg]) + result = api.list_entries( + [PROJECT], FILTER, google.cloud.logging.DESCENDING) + + entries = list(result) + + # Check the response + assert len(entries) == 1 + entry = entries[0] + assert isinstance(entry, google.cloud.logging.entries.TextEntry) + assert entry.payload == 'text' + + # Check the request + assert len(channel.ListLogEntries.requests) == 1 + request = channel.ListLogEntries.requests[0] + assert request.project_ids == [PROJECT] + assert request.filter == FILTER + assert request.order_by == google.cloud.logging.DESCENDING + + def test_list_entries_with_options(self): + channel, api = self.make_logging_api() + + channel.ListLogEntries.response = logging_pb2.ListLogEntriesResponse( + entries=[]) + + result = api.list_entries( + [PROJECT], FILTER, google.cloud.logging.ASCENDING, page_size=42, + page_token='token') + + list(result) + + # Check the request + assert len(channel.ListLogEntries.requests) == 1 + request = channel.ListLogEntries.requests[0] + assert request.project_ids == [PROJECT] + assert request.filter == FILTER + assert request.order_by == google.cloud.logging.ASCENDING + assert request.page_size == 42 + 
assert request.page_token == 'token' + + def test_write_entries_single(self): + channel, api = self.make_logging_api() + + channel.WriteLogEntries.response = empty_pb2.Empty() + + entry = { + 'logName': self.LOG_PATH, + 'resource': {'type': 'global'}, + 'textPayload': 'text', + } + + api.write_entries([entry]) + + # Check the request + assert len(channel.WriteLogEntries.requests) == 1 + request = channel.WriteLogEntries.requests[0] + assert request.partial_success is False + assert len(request.entries) == 1 + assert request.entries[0].log_name == entry['logName'] + assert request.entries[0].resource.type == entry['resource']['type'] + assert request.entries[0].text_payload == 'text' + + def test_logger_delete(self): + channel, api = self.make_logging_api() + + channel.DeleteLog.response = empty_pb2.Empty() + + api.logger_delete(PROJECT, self.LOG_NAME) + + assert len(channel.DeleteLog.requests) == 1 + request = channel.DeleteLog.requests[0] + assert request.log_name == self.LOG_PATH + + +class Test_SinksAPI(object): + SINK_NAME = 'sink_name' + SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + DESTINATION_URI = 'faux.googleapis.com/destination' + SINK_WRITER_IDENTITY = 'serviceAccount:project-123@example.com' + + @staticmethod + def make_sinks_api(): + channel = grpc_helpers.ChannelStub() + gapic_client = config_service_v2_client.ConfigServiceV2Client( + channel=channel) + handwritten_client = mock.Mock() + api = _gapic._SinksAPI(gapic_client, handwritten_client) + return channel, api + + def test_ctor(self): + channel = grpc_helpers.ChannelStub() + gapic_client = config_service_v2_client.ConfigServiceV2Client( + channel=channel) + api = _gapic._SinksAPI(gapic_client, mock.sentinel.client) + assert api._gapic_api is gapic_client + assert api._client is mock.sentinel.client + + def test_list_sinks(self): + channel, api = self.make_sinks_api() + + sink_msg = logging_config_pb2.LogSink( + name=self.SINK_PATH, + destination=self.DESTINATION_URI, + 
filter=FILTER) + channel.ListSinks.response = logging_config_pb2.ListSinksResponse( + sinks=[sink_msg]) + + result = api.list_sinks(PROJECT) + sinks = list(result) + + # Check the response + assert len(sinks) == 1 + sink = sinks[0] + assert isinstance(sink, google.cloud.logging.sink.Sink) + assert sink.name == self.SINK_PATH + assert sink.destination == self.DESTINATION_URI + assert sink.filter_ == FILTER + + # Check the request + assert len(channel.ListSinks.requests) == 1 + request = channel.ListSinks.requests[0] + assert request.parent == PROJECT_PATH + + def test_list_sinks_with_options(self): + channel, api = self.make_sinks_api() + + channel.ListSinks.response = logging_config_pb2.ListSinksResponse( + sinks=[]) + + result = api.list_sinks(PROJECT, page_size=42, page_token='token') + list(result) + + # Check the request + assert len(channel.ListSinks.requests) == 1 + request = channel.ListSinks.requests[0] + assert request.parent == 'projects/%s' % PROJECT + assert request.page_size == 42 + assert request.page_token == 'token' + + def test_sink_create(self): + channel, api = self.make_sinks_api() + + channel.CreateSink.response = logging_config_pb2.LogSink( + name=self.SINK_NAME, + destination=self.DESTINATION_URI, + filter=FILTER, + writer_identity=self.SINK_WRITER_IDENTITY, + ) + + result = api.sink_create( + PROJECT, + self.SINK_NAME, + FILTER, + self.DESTINATION_URI, + unique_writer_identity=True, + ) + + # Check response + assert result == { + 'name': self.SINK_NAME, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.SINK_WRITER_IDENTITY, + } + + # Check request + assert len(channel.CreateSink.requests) == 1 + request = channel.CreateSink.requests[0] + assert request.parent == PROJECT_PATH + assert request.unique_writer_identity is True + assert request.sink.name == self.SINK_NAME + assert request.sink.filter == FILTER + assert request.sink.destination == self.DESTINATION_URI + + def test_sink_get(self): + channel, api = 
self.make_sinks_api() + + channel.GetSink.response = logging_config_pb2.LogSink( + name=self.SINK_PATH, + destination=self.DESTINATION_URI, + filter=FILTER) + + response = api.sink_get(PROJECT, self.SINK_NAME) + + # Check response + assert response == { + 'name': self.SINK_PATH, + 'filter': FILTER, + 'destination': self.DESTINATION_URI + } + + # Check request + assert len(channel.GetSink.requests) == 1 + request = channel.GetSink.requests[0] + assert request.sink_name == self.SINK_PATH + + def test_sink_update(self): + channel, api = self.make_sinks_api() + + channel.UpdateSink.response = logging_config_pb2.LogSink( + name=self.SINK_NAME, + destination=self.DESTINATION_URI, + filter=FILTER, + writer_identity=self.SINK_WRITER_IDENTITY, + ) + + result = api.sink_update( + PROJECT, + self.SINK_NAME, + FILTER, + self.DESTINATION_URI, + unique_writer_identity=True) + + # Check response + assert result == { + 'name': self.SINK_NAME, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + 'writerIdentity': self.SINK_WRITER_IDENTITY, + } + + # Check request + assert len(channel.UpdateSink.requests) == 1 + request = channel.UpdateSink.requests[0] + assert request.sink_name == self.SINK_PATH + assert request.unique_writer_identity is True + assert request.sink.name == self.SINK_PATH + assert request.sink.filter == FILTER + assert request.sink.destination == self.DESTINATION_URI + + def test_sink_delete(self): + channel, api = self.make_sinks_api() + + channel.DeleteSink.response = empty_pb2.Empty() + + api.sink_delete(PROJECT, self.SINK_NAME) + + assert len(channel.DeleteSink.requests) == 1 + request = channel.DeleteSink.requests[0] + assert request.sink_name == self.SINK_PATH + + +class Test_MetricsAPI(object): + METRIC_NAME = 'metric_name' + METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + DESCRIPTION = 'Description' + + @staticmethod + def make_metrics_api(): + channel = grpc_helpers.ChannelStub() + gapic_client = 
metrics_service_v2_client.MetricsServiceV2Client( + channel=channel) + handwritten_client = mock.Mock() + api = _gapic._MetricsAPI(gapic_client, handwritten_client) + return channel, api + + def test_ctor(self): + channel = grpc_helpers.ChannelStub() + gapic_client = metrics_service_v2_client.MetricsServiceV2Client( + channel=channel) + api = _gapic._MetricsAPI(gapic_client, mock.sentinel.client) + assert api._gapic_api is gapic_client + assert api._client is mock.sentinel.client + + def test_list_metrics(self): + channel, api = self.make_metrics_api() + + sink_msg = logging_metrics_pb2.LogMetric( + name=self.METRIC_PATH, + description=self.DESCRIPTION, + filter=FILTER) + channel.ListLogMetrics.response = ( + logging_metrics_pb2.ListLogMetricsResponse( + metrics=[sink_msg])) + + result = api.list_metrics(PROJECT) + metrics = list(result) + + # Check the response + assert len(metrics) == 1 + metric = metrics[0] + assert isinstance(metric, google.cloud.logging.metric.Metric) + assert metric.name == self.METRIC_PATH + assert metric.description == self.DESCRIPTION + assert metric.filter_ == FILTER + + # Check the request + assert len(channel.ListLogMetrics.requests) == 1 + request = channel.ListLogMetrics.requests[0] + assert request.parent == PROJECT_PATH + + def test_list_metrics_options(self): + channel, api = self.make_metrics_api() + + channel.ListLogMetrics.response = ( + logging_metrics_pb2.ListLogMetricsResponse( + metrics=[])) + + result = api.list_metrics(PROJECT, page_size=42, page_token='token') + list(result) + + # Check the request + assert len(channel.ListLogMetrics.requests) == 1 + request = channel.ListLogMetrics.requests[0] + assert request.parent == PROJECT_PATH + assert request.page_size == 42 + assert request.page_token == 'token' + + def test_metric_create(self): + channel, api = self.make_metrics_api() + + channel.CreateLogMetric.response = empty_pb2.Empty() + + api.metric_create( + PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) + + # Check 
the request + assert len(channel.CreateLogMetric.requests) == 1 + request = channel.CreateLogMetric.requests[0] + assert request.parent == PROJECT_PATH + assert request.metric.name == self.METRIC_NAME + assert request.metric.filter == FILTER + assert request.metric.description == self.DESCRIPTION + + def test_metric_get(self): + channel, api = self.make_metrics_api() + + channel.GetLogMetric.response = logging_metrics_pb2.LogMetric( + name=self.METRIC_PATH, + description=self.DESCRIPTION, + filter=FILTER) + + response = api.metric_get(PROJECT, self.METRIC_NAME) + + # Check the response + assert response == { + 'name': self.METRIC_PATH, + 'filter': FILTER, + 'description': self.DESCRIPTION, + } + + # Check the request + assert len(channel.GetLogMetric.requests) == 1 + request = channel.GetLogMetric.requests[0] + assert request.metric_name == self.METRIC_PATH + + def test_metric_update(self): + channel, api = self.make_metrics_api() + + channel.UpdateLogMetric.response = logging_metrics_pb2.LogMetric( + name=self.METRIC_PATH, + description=self.DESCRIPTION, + filter=FILTER) + + response = api.metric_update( + PROJECT, self.METRIC_NAME, FILTER, self.DESCRIPTION) + + # Check the response + assert response == { + 'name': self.METRIC_PATH, + 'filter': FILTER, + 'description': self.DESCRIPTION, + } + + # Check the request + assert len(channel.UpdateLogMetric.requests) == 1 + request = channel.UpdateLogMetric.requests[0] + assert request.metric_name == self.METRIC_PATH + assert request.metric.name == self.METRIC_PATH + assert request.metric.filter == FILTER + assert request.metric.description == self.DESCRIPTION + + def test_metric_delete(self): + channel, api = self.make_metrics_api() + + channel.DeleteLogMetric.response = empty_pb2.Empty() + + api.metric_delete(PROJECT, self.METRIC_NAME) + + assert len(channel.DeleteLogMetric.requests) == 1 + request = channel.DeleteLogMetric.requests[0] + assert request.metric_name == self.METRIC_PATH + + +class 
Test__parse_log_entry(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.logging._gapic import _parse_log_entry + + return _parse_log_entry(*args, **kwargs) + + def test_simple(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + + entry_pb = LogEntry(log_name=u'lol-jk', text_payload=u'bah humbug') + result = self._call_fut(entry_pb) + expected = { + 'logName': entry_pb.log_name, + 'textPayload': entry_pb.text_payload, + } + self.assertEqual(result, expected) + + @mock.patch('google.cloud.logging._gapic.MessageToDict', + side_effect=TypeError) + def test_non_registry_failure(self, msg_to_dict_mock): + entry_pb = mock.Mock(spec=['HasField']) + entry_pb.HasField.return_value = False + with self.assertRaises(TypeError): + self._call_fut(entry_pb) + + entry_pb.HasField.assert_called_once_with('proto_payload') + msg_to_dict_mock.assert_called_once_with(entry_pb) + + def test_unregistered_type(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.protobuf.timestamp_pb2 import Timestamp + + pool = descriptor_pool.Default() + type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + # Make sure the descriptor is not known in the registry. + with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + metadata_bytes = ( + b'\n\n\n\x03foo\x12\x03bar\x12\x06\x08\xbd\xb6\xfb\xc6\x05') + any_pb = any_pb2.Any(type_url=type_url, value=metadata_bytes) + timestamp = Timestamp(seconds=61, nanos=1234000) + + entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) + result = self._call_fut(entry_pb) + self.assertEqual(len(result), 2) + self.assertEqual(result['timestamp'], '1970-01-01T00:01:01.001234Z') + # NOTE: This "hack" is needed on Windows, where the equality check + # for an ``Any`` instance fails on unregistered types. 
+ self.assertEqual(result['protoPayload'].type_url, type_url) + self.assertEqual(result['protoPayload'].value, metadata_bytes) + + def test_registered_type(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + pool = descriptor_pool.Default() + type_name = 'google.protobuf.Struct' + # Make sure the descriptor is known in the registry. + descriptor = pool.FindMessageTypeByName(type_name) + self.assertEqual(descriptor.name, 'Struct') + + type_url = 'type.googleapis.com/' + type_name + field_name = 'foo' + field_value = u'Bar' + struct_pb = Struct( + fields={field_name: Value(string_value=field_value)}) + any_pb = any_pb2.Any( + type_url=type_url, + value=struct_pb.SerializeToString(), + ) + + entry_pb = LogEntry(proto_payload=any_pb, log_name=u'all-good') + result = self._call_fut(entry_pb) + expected_proto = { + 'logName': entry_pb.log_name, + 'protoPayload': { + '@type': type_url, + 'value': {field_name: field_value}, + }, + } + self.assertEqual(result, expected_proto) + + +class Test__log_entry_mapping_to_pb(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.logging._gapic import _log_entry_mapping_to_pb + + return _log_entry_mapping_to_pb(*args, **kwargs) + + def test_simple(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + + result = self._call_fut({}) + self.assertEqual(result, LogEntry()) + + def test_unregistered_type(self): + from google.protobuf import descriptor_pool + from google.protobuf.json_format import ParseError + + pool = descriptor_pool.Default() + type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + # Make sure the descriptor is not known in the registry. 
+ with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + json_mapping = { + 'protoPayload': { + '@type': type_url, + 'originalRequest': { + 'name': 'foo', + 'location': 'bar', + }, + 'requestTime': { + 'seconds': 1491000125, + }, + }, + } + with self.assertRaises(ParseError): + self._call_fut(json_mapping) + + def test_registered_type(self): + from google.cloud.logging_v2.proto.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + + pool = descriptor_pool.Default() + type_name = 'google.protobuf.Struct' + # Make sure the descriptor is known in the registry. + descriptor = pool.FindMessageTypeByName(type_name) + self.assertEqual(descriptor.name, 'Struct') + + type_url = 'type.googleapis.com/' + type_name + field_name = 'foo' + field_value = u'Bar' + json_mapping = { + 'logName': u'hi-everybody', + 'protoPayload': { + '@type': type_url, + 'value': {field_name: field_value}, + }, + } + # Convert to a valid LogEntry. 
+ result = self._call_fut(json_mapping) + entry_pb = LogEntry( + log_name=json_mapping['logName'], + proto_payload=any_pb2.Any( + type_url=type_url, + value=b'\n\014\n\003foo\022\005\032\003Bar', + ), + ) + self.assertEqual(result, entry_pb) + + +@mock.patch( + 'google.cloud.logging._gapic.LoggingServiceV2Client', autospec=True) +def test_make_logging_api(gapic_client): + client = mock.Mock(spec=['_credentials']) + api = _gapic.make_logging_api(client) + assert api._client == client + assert api._gapic_api == gapic_client.return_value + gapic_client.assert_called_once_with( + credentials=client._credentials, client_info=_gapic._CLIENT_INFO) + + +@mock.patch( + 'google.cloud.logging._gapic.MetricsServiceV2Client', autospec=True) +def test_make_metrics_api(gapic_client): + client = mock.Mock(spec=['_credentials']) + api = _gapic.make_metrics_api(client) + assert api._client == client + assert api._gapic_api == gapic_client.return_value + gapic_client.assert_called_once_with( + credentials=client._credentials, client_info=_gapic._CLIENT_INFO) + + +@mock.patch( + 'google.cloud.logging._gapic.ConfigServiceV2Client', autospec=True) +def test_make_sinks_api(gapic_client): + client = mock.Mock(spec=['_credentials']) + api = _gapic.make_sinks_api(client) + assert api._client == client + assert api._gapic_api == gapic_client.return_value + gapic_client.assert_called_once_with( + credentials=client._credentials, client_info=_gapic._CLIENT_INFO) diff --git a/logging/tests/unit/test__gax.py b/logging/tests/unit/test__gax.py deleted file mode 100644 index c2c5f3199abf..000000000000 --- a/logging/tests/unit/test__gax.py +++ /dev/null @@ -1,1614 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - -try: - # pylint: disable=unused-import - import google.cloud.logging._gax - # pylint: enable=unused-import -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False -else: - _HAVE_GRPC = True - -from google.cloud._testing import _GAXBaseAPI - - -def _make_credentials(): - # pylint: disable=redefined-outer-name - import google.auth.credentials - # pylint: enable=redefined-outer-name - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(object): - PROJECT = 'PROJECT' - PROJECT_PATH = 'projects/%s' % (PROJECT,) - FILTER = 'logName:syslog AND severity>=ERROR' - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_LoggingAPI(_Base, unittest.TestCase): - LOG_NAME = 'log_name' - LOG_PATH = 'projects/%s/logs/%s' % (_Base.PROJECT, LOG_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.logging._gax import _LoggingAPI - - return _LoggingAPI - - def test_ctor(self): - gax_api = _GAXLoggingAPI() - client = object() - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_entries_no_paging(self): - import datetime - - from google.api.monitored_resource_pb2 import MonitoredResource - from google.gax import INITIAL_PAGE - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud._helpers import UTC - from google.cloud._testing 
import _GAXPageIterator - from google.cloud.logging import DESCENDING - from google.cloud.logging.client import Client - from google.cloud.logging.entries import TextEntry - from google.cloud.logging.logger import Logger - - TOKEN = 'TOKEN' - TEXT = 'TEXT' - resource_pb = MonitoredResource(type='global') - timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC) - timestamp_pb = _datetime_to_pb_timestamp(timestamp) - entry_pb = LogEntry(log_name=self.LOG_PATH, - resource=resource_pb, - timestamp=timestamp_pb, - text_payload=TEXT) - response = _GAXPageIterator([entry_pb], page_token=TOKEN) - gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - client = Client(project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=True) - api = self._make_one(gax_api, client) - - iterator = api.list_entries( - [self.PROJECT], self.FILTER, DESCENDING) - entries = list(iterator) - next_token = iterator.next_page_token - - # First check the token. - self.assertEqual(next_token, TOKEN) - # Then check the entries returned. 
- self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertIsInstance(entry, TextEntry) - self.assertEqual(entry.payload, TEXT) - self.assertIsInstance(entry.logger, Logger) - self.assertEqual(entry.logger.name, self.LOG_NAME) - self.assertIsNone(entry.insert_id) - self.assertEqual(entry.timestamp, timestamp) - self.assertIsNone(entry.labels) - self.assertIsNone(entry.severity) - self.assertIsNone(entry.http_request) - - resource_names, projects, filter_, order_by, page_size, options = ( - gax_api._list_log_entries_called_with) - self.assertEqual(resource_names, []) - self.assertEqual(projects, [self.PROJECT]) - self.assertEqual(filter_, self.FILTER) - self.assertEqual(order_by, DESCENDING) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def _list_entries_with_paging_helper(self, payload, struct_pb): - import datetime - - from google.api.monitored_resource_pb2 import MonitoredResource - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud._helpers import UTC - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.client import Client - from google.cloud.logging.entries import StructEntry - from google.cloud.logging.logger import Logger - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - resource_pb = MonitoredResource(type='global') - timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC) - timestamp_pb = _datetime_to_pb_timestamp(timestamp) - entry_pb = LogEntry(log_name=self.LOG_PATH, - resource=resource_pb, - timestamp=timestamp_pb, - json_payload=struct_pb) - response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) - gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - client = Client(project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=True) - api = self._make_one(gax_api, client) - - iterator = api.list_entries( - [self.PROJECT], page_size=SIZE, 
page_token=TOKEN) - entries = list(iterator) - next_token = iterator.next_page_token - - # First check the token. - self.assertEqual(next_token, NEW_TOKEN) - self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertIsInstance(entry, StructEntry) - self.assertEqual(entry.payload, payload) - self.assertIsInstance(entry.logger, Logger) - self.assertEqual(entry.logger.name, self.LOG_NAME) - self.assertIsNone(entry.insert_id) - self.assertEqual(entry.timestamp, timestamp) - self.assertIsNone(entry.labels) - self.assertIsNone(entry.severity) - self.assertIsNone(entry.http_request) - - resource_names, projects, filter_, order_by, page_size, options = ( - gax_api._list_log_entries_called_with) - self.assertEqual(resource_names, []) - self.assertEqual(projects, [self.PROJECT]) - self.assertEqual(filter_, '') - self.assertEqual(order_by, '') - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_list_entries_with_paging(self): - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - payload = {'message': 'MESSAGE', 'weather': 'sunny'} - struct_pb = Struct(fields={ - key: Value(string_value=value) for key, value in payload.items() - }) - self._list_entries_with_paging_helper(payload, struct_pb) - - def test_list_entries_with_paging_nested_payload(self): - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - payload = {} - struct_fields = {} - # Add a simple key. - key = 'message' - payload[key] = 'MESSAGE' - struct_fields[key] = Value(string_value=payload[key]) - # Add a nested key. - key = 'weather' - sub_value = {} - sub_fields = {} - sub_key = 'temperature' - sub_value[sub_key] = 75 - sub_fields[sub_key] = Value(number_value=sub_value[sub_key]) - sub_key = 'precipitation' - sub_value[sub_key] = False - sub_fields[sub_key] = Value(bool_value=sub_value[sub_key]) - # Update the parent payload. 
- payload[key] = sub_value - struct_fields[key] = Value(struct_value=Struct(fields=sub_fields)) - # Make the struct_pb for our dict. - struct_pb = Struct(fields=struct_fields) - self._list_entries_with_paging_helper(payload, struct_pb) - - def _make_log_entry_with_extras(self, labels, iid, type_url, now): - from google.api.monitored_resource_pb2 import MonitoredResource - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.cloud.proto.logging.v2.log_entry_pb2 import ( - LogEntryOperation) - from google.logging.type.http_request_pb2 import HttpRequest - from google.logging.type.log_severity_pb2 import WARNING - from google.protobuf.any_pb2 import Any - - from google.cloud._helpers import _datetime_to_pb_timestamp - - resource_pb = MonitoredResource( - type='global', labels=labels) - proto_payload = Any(type_url=type_url) - timestamp_pb = _datetime_to_pb_timestamp(now) - request_pb = HttpRequest( - request_url='http://example.com/requested', - request_method='GET', - status=200, - referer='http://example.com/referer', - user_agent='AGENT', - cache_hit=True, - request_size=256, - response_size=1024, - remote_ip='1.2.3.4', - ) - operation_pb = LogEntryOperation( - producer='PRODUCER', - first=True, - last=True, - id='OPID', - ) - entry_pb = LogEntry(log_name=self.LOG_PATH, - resource=resource_pb, - proto_payload=proto_payload, - timestamp=timestamp_pb, - severity=WARNING, - insert_id=iid, - http_request=request_pb, - labels=labels, - operation=operation_pb) - return entry_pb - - def test_list_entries_with_extra_properties(self): - import datetime - - # Import the wrappers to register the type URL for BoolValue - # pylint: disable=unused-variable - from google.protobuf import wrappers_pb2 - # pylint: enable=unused-variable - - from google.cloud._helpers import UTC - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.client import Client - from google.cloud.logging.entries import ProtobufEntry - from 
google.cloud.logging.logger import Logger - - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - SEVERITY = 'WARNING' - LABELS = { - 'foo': 'bar', - } - IID = 'IID' - bool_type_url = 'type.googleapis.com/google.protobuf.BoolValue' - entry_pb = self._make_log_entry_with_extras( - LABELS, IID, bool_type_url, NOW) - - response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN) - gax_api = _GAXLoggingAPI(_list_log_entries_response=response) - client = Client(project=self.PROJECT, credentials=_make_credentials(), - _use_grpc=True) - api = self._make_one(gax_api, client) - - iterator = api.list_entries( - [self.PROJECT], page_size=SIZE, page_token=TOKEN) - entries = list(iterator) - next_token = iterator.next_page_token - - # First check the token. - self.assertEqual(next_token, NEW_TOKEN) - # Then check the entries returned. - self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertIsInstance(entry, ProtobufEntry) - self.assertEqual(entry.payload, { - '@type': bool_type_url, - 'value': False, - }) - self.assertIsInstance(entry.logger, Logger) - self.assertEqual(entry.logger.name, self.LOG_NAME) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, NOW) - self.assertEqual(entry.labels, {'foo': 'bar'}) - self.assertEqual(entry.severity, SEVERITY) - self.assertEqual(entry.http_request, { - 'requestMethod': entry_pb.http_request.request_method, - 'requestUrl': entry_pb.http_request.request_url, - 'status': entry_pb.http_request.status, - 'requestSize': str(entry_pb.http_request.request_size), - 'responseSize': str(entry_pb.http_request.response_size), - 'referer': entry_pb.http_request.referer, - 'userAgent': entry_pb.http_request.user_agent, - 'remoteIp': entry_pb.http_request.remote_ip, - 'cacheHit': entry_pb.http_request.cache_hit, - }) - - resource_names, projects, filter_, order_by, page_size, options = ( - gax_api._list_log_entries_called_with) - 
self.assertEqual(resource_names, []) - self.assertEqual(projects, [self.PROJECT]) - self.assertEqual(filter_, '') - self.assertEqual(order_by, '') - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_write_entries_single(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - - TEXT = 'TEXT' - ENTRY = { - 'logName': self.LOG_PATH, - 'resource': {'type': 'global'}, - 'textPayload': TEXT, - } - gax_api = _GAXLoggingAPI() - api = self._make_one(gax_api, None) - - api.write_entries([ENTRY]) - - entries, log_name, resource, labels, partial_success, options = ( - gax_api._write_log_entries_called_with) - self.assertEqual(len(entries), 1) - - entry = entries[0] - self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, self.LOG_PATH) - self.assertEqual(entry.resource.type, 'global') - self.assertEqual(entry.labels, {}) - self.assertEqual(entry.text_payload, TEXT) - - self.assertIsNone(log_name) - self.assertIsNone(resource) - self.assertIsNone(labels) - self.assertEqual(partial_success, False) - self.assertIsNone(options) - - def test_write_entries_w_extra_properties(self): - # pylint: disable=too-many-statements - from datetime import datetime - from google.logging.type.log_severity_pb2 import WARNING - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud._helpers import UTC, _pb_timestamp_to_datetime - - NOW = datetime.utcnow().replace(tzinfo=UTC) - TEXT = 'TEXT' - SEVERITY = 'WARNING' - LABELS = { - 'foo': 'bar', - } - IID = 'IID' - REQUEST_METHOD = 'GET' - REQUEST_URL = 'http://example.com/requested' - STATUS = 200 - REQUEST_SIZE = 256 - RESPONSE_SIZE = 1024 - REFERRER_URL = 'http://example.com/referer' - USER_AGENT = 'Agent/1.0' - REMOTE_IP = '1.2.3.4' - REQUEST = { - 'requestMethod': REQUEST_METHOD, - 'requestUrl': REQUEST_URL, - 'status': STATUS, - 'requestSize': REQUEST_SIZE, - 'responseSize': 
RESPONSE_SIZE, - 'referer': REFERRER_URL, - 'userAgent': USER_AGENT, - 'remoteIp': REMOTE_IP, - 'cacheHit': False, - } - PRODUCER = 'PRODUCER' - OPID = 'OPID' - OPERATION = { - 'producer': PRODUCER, - 'id': OPID, - 'first': False, - 'last': True, - } - ENTRY = { - 'logName': self.LOG_PATH, - 'resource': {'type': 'global'}, - 'textPayload': TEXT, - 'severity': SEVERITY, - 'labels': LABELS, - 'insertId': IID, - 'timestamp': _datetime_to_rfc3339(NOW), - 'httpRequest': REQUEST, - 'operation': OPERATION, - } - gax_api = _GAXLoggingAPI() - api = self._make_one(gax_api, None) - - api.write_entries([ENTRY]) - - entries, log_name, resource, labels, partial_success, options = ( - gax_api._write_log_entries_called_with) - self.assertEqual(len(entries), 1) - - entry = entries[0] - self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, self.LOG_PATH) - self.assertEqual(entry.resource.type, 'global') - self.assertEqual(entry.text_payload, TEXT) - self.assertEqual(entry.severity, WARNING) - self.assertEqual(entry.labels, LABELS) - self.assertEqual(entry.insert_id, IID) - stamp = _pb_timestamp_to_datetime(entry.timestamp) - self.assertEqual(stamp, NOW) - - request = entry.http_request - self.assertEqual(request.request_method, REQUEST_METHOD) - self.assertEqual(request.request_url, REQUEST_URL) - self.assertEqual(request.status, STATUS) - self.assertEqual(request.request_size, REQUEST_SIZE) - self.assertEqual(request.response_size, RESPONSE_SIZE) - self.assertEqual(request.referer, REFERRER_URL) - self.assertEqual(request.user_agent, USER_AGENT) - self.assertEqual(request.remote_ip, REMOTE_IP) - self.assertEqual(request.cache_hit, False) - - operation = entry.operation - self.assertEqual(operation.producer, PRODUCER) - self.assertEqual(operation.id, OPID) - self.assertFalse(operation.first) - self.assertTrue(operation.last) - - self.assertIsNone(log_name) - self.assertIsNone(resource) - self.assertIsNone(labels) - self.assertEqual(partial_success, False) - 
self.assertIsNone(options) - # pylint: enable=too-many-statements - - def _write_entries_multiple_helper(self, json_payload, json_struct_pb): - # pylint: disable=too-many-statements - import datetime - from google.logging.type.log_severity_pb2 import WARNING - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.protobuf.any_pb2 import Any - from google.cloud._helpers import _datetime_to_rfc3339 - from google.cloud._helpers import UTC - - TEXT = 'TEXT' - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) - TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp' - PROTO = { - '@type': TIMESTAMP_TYPE_URL, - 'value': _datetime_to_rfc3339(NOW), - } - PRODUCER = 'PRODUCER' - OPID = 'OPID' - URL = 'http://example.com/' - ENTRIES = [ - {'textPayload': TEXT, - 'severity': WARNING}, - {'jsonPayload': json_payload, - 'operation': {'producer': PRODUCER, 'id': OPID}}, - {'protoPayload': PROTO, - 'httpRequest': {'requestUrl': URL}}, - ] - RESOURCE = { - 'type': 'global', - } - LABELS = { - 'foo': 'bar', - } - gax_api = _GAXLoggingAPI() - api = self._make_one(gax_api, None) - - api.write_entries(ENTRIES, self.LOG_PATH, RESOURCE, LABELS) - - entries, log_name, resource, labels, partial_success, options = ( - gax_api._write_log_entries_called_with) - self.assertEqual(len(entries), len(ENTRIES)) - - entry = entries[0] - self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, '') - self.assertEqual(entry.resource.type, '') - self.assertEqual(entry.labels, {}) - self.assertEqual(entry.text_payload, TEXT) - self.assertEqual(entry.severity, WARNING) - - entry = entries[1] - self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, '') - self.assertEqual(entry.resource.type, '') - self.assertEqual(entry.labels, {}) - self.assertEqual(entry.json_payload, json_struct_pb) - operation = entry.operation - self.assertEqual(operation.producer, PRODUCER) - self.assertEqual(operation.id, OPID) - - entry = entries[2] - 
self.assertIsInstance(entry, LogEntry) - self.assertEqual(entry.log_name, '') - self.assertEqual(entry.resource.type, '') - self.assertEqual(entry.labels, {}) - proto = entry.proto_payload - self.assertIsInstance(proto, Any) - self.assertEqual(proto.type_url, TIMESTAMP_TYPE_URL) - request = entry.http_request - self.assertEqual(request.request_url, URL) - - self.assertEqual(log_name, self.LOG_PATH) - self.assertEqual(resource, RESOURCE) - self.assertEqual(labels, LABELS) - self.assertEqual(partial_success, False) - self.assertIsNone(options) - # pylint: enable=too-many-statements - - def test_write_entries_multiple(self): - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - json_payload = {'payload': 'PAYLOAD', 'type': 'json'} - json_struct_pb = Struct(fields={ - key: Value(string_value=value) - for key, value in json_payload.items() - }) - self._write_entries_multiple_helper(json_payload, json_struct_pb) - - def test_write_entries_multiple_nested_payload(self): - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - json_payload = {} - struct_fields = {} - # Add a simple key. - key = 'hello' - json_payload[key] = 'me you looking for' - struct_fields[key] = Value(string_value=json_payload[key]) - # Add a nested key. - key = 'everything' - sub_value = {} - sub_fields = {} - sub_key = 'answer' - sub_value[sub_key] = 42 - sub_fields[sub_key] = Value(number_value=sub_value[sub_key]) - sub_key = 'really?' - sub_value[sub_key] = False - sub_fields[sub_key] = Value(bool_value=sub_value[sub_key]) - # Update the parent payload. - json_payload[key] = sub_value - struct_fields[key] = Value(struct_value=Struct(fields=sub_fields)) - # Make the struct_pb for our dict. 
- json_struct_pb = Struct(fields=struct_fields) - self._write_entries_multiple_helper(json_payload, json_struct_pb) - - def test_logger_delete(self): - gax_api = _GAXLoggingAPI() - api = self._make_one(gax_api, None) - - api.logger_delete(self.PROJECT, self.LOG_NAME) - - log_name, options = gax_api._delete_log_called_with - self.assertEqual(log_name, self.LOG_PATH) - self.assertIsNone(options) - - def test_logger_delete_not_found(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXLoggingAPI(_delete_not_found=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.logger_delete(self.PROJECT, self.LOG_NAME) - - log_name, options = gax_api._delete_log_called_with - self.assertEqual(log_name, self.LOG_PATH) - self.assertIsNone(options) - - def test_logger_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXLoggingAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.logger_delete(self.PROJECT, self.LOG_NAME) - - log_name, options = gax_api._delete_log_called_with - self.assertEqual(log_name, self.LOG_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_SinksAPI(_Base, unittest.TestCase): - SINK_NAME = 'sink_name' - SINK_PATH = 'projects/%s/sinks/%s' % (_Base.PROJECT, SINK_NAME) - DESTINATION_URI = 'faux.googleapis.com/destination' - SINK_WRITER_IDENTITY = 'serviceAccount:project-123@example.com' - - @staticmethod - def _get_target_class(): - from google.cloud.logging._gax import _SinksAPI - - return _SinksAPI - - def test_ctor(self): - gax_api = _GAXSinksAPI() - client = object() - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_sinks_no_paging(self): - import six - from google.gax import INITIAL_PAGE - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - from google.cloud._testing 
import _GAXPageIterator - from google.cloud.logging.sink import Sink - - TOKEN = 'TOKEN' - sink_pb = LogSink(name=self.SINK_PATH, - destination=self.DESTINATION_URI, - filter=self.FILTER) - response = _GAXPageIterator([sink_pb], page_token=TOKEN) - gax_api = _GAXSinksAPI(_list_sinks_response=response) - client = object() - api = self._make_one(gax_api, client) - - iterator = api.list_sinks(self.PROJECT) - page = six.next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. - self.assertEqual(len(sinks), 1) - sink = sinks[0] - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - - project, page_size, options = gax_api._list_sinks_called_with - self.assertEqual(project, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertEqual(options.page_token, INITIAL_PAGE) - - def test_list_sinks_w_paging(self): - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.sink import Sink - - TOKEN = 'TOKEN' - PAGE_SIZE = 42 - sink_pb = LogSink(name=self.SINK_PATH, - destination=self.DESTINATION_URI, - filter=self.FILTER) - response = _GAXPageIterator([sink_pb]) - gax_api = _GAXSinksAPI(_list_sinks_response=response) - client = object() - api = self._make_one(gax_api, client) - - iterator = api.list_sinks( - self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) - sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. 
- self.assertEqual(len(sinks), 1) - sink = sinks[0] - self.assertIsInstance(sink, Sink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter_, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIs(sink.client, client) - - project, page_size, options = gax_api._list_sinks_called_with - self.assertEqual(project, self.PROJECT_PATH) - self.assertEqual(page_size, PAGE_SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_sink_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, - self.DESTINATION_URI) - - def test_sink_create_conflict(self): - from google.cloud.exceptions import Conflict - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - gax_api = _GAXSinksAPI(_create_sink_conflict=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(Conflict): - api.sink_create( - self.PROJECT, self.SINK_NAME, self.FILTER, - self.DESTINATION_URI) - - parent, sink, unique_writer_identity, options = ( - gax_api._create_sink_called_with) - self.assertEqual(parent, self.PROJECT_PATH) - self.assertIsInstance(sink, LogSink) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertIsNone(options) - self.assertFalse(unique_writer_identity) - - def test_sink_create_ok(self): - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - gax_api = _GAXSinksAPI() - gax_api._create_sink_response = LogSink( - name=self.SINK_NAME, - destination=self.DESTINATION_URI, - filter=self.FILTER, - writer_identity=self.SINK_WRITER_IDENTITY, - ) - api = self._make_one(gax_api, None) - - returned = api.sink_create( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - 
unique_writer_identity=True, - ) - - self.assertEqual(returned, { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - 'writerIdentity': self.SINK_WRITER_IDENTITY, - }) - - parent, sink, unique_writer_identity, options = ( - gax_api._create_sink_called_with) - self.assertEqual(parent, self.PROJECT_PATH) - self.assertIsInstance(sink, LogSink) - self.assertEqual(sink.name, self.SINK_NAME) - self.assertEqual(sink.filter, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertTrue(unique_writer_identity) - self.assertIsNone(options) - - def test_sink_get_error(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSinksAPI() - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.sink_get(self.PROJECT, self.SINK_NAME) - - def test_sink_get_miss(self): - from google.gax.errors import GaxError - - gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.sink_get(self.PROJECT, self.SINK_NAME) - - def test_sink_get_hit(self): - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - RESPONSE = { - 'name': self.SINK_PATH, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - } - sink_pb = LogSink(name=self.SINK_PATH, - destination=self.DESTINATION_URI, - filter=self.FILTER) - gax_api = _GAXSinksAPI(_get_sink_response=sink_pb) - api = self._make_one(gax_api, None) - - response = api.sink_get(self.PROJECT, self.SINK_NAME) - - self.assertEqual(response, RESPONSE) - - sink_name, options = gax_api._get_sink_called_with - self.assertEqual(sink_name, self.SINK_PATH) - self.assertIsNone(options) - - def test_sink_update_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSinksAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.sink_update( - self.PROJECT, self.SINK_NAME, self.FILTER, - 
self.DESTINATION_URI) - - def test_sink_update_miss(self): - from google.cloud.exceptions import NotFound - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - gax_api = _GAXSinksAPI() - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.sink_update( - self.PROJECT, self.SINK_NAME, self.FILTER, - self.DESTINATION_URI) - - sink_name, sink, unique_writer_identity, options = ( - gax_api._update_sink_called_with) - self.assertEqual(sink_name, self.SINK_PATH) - self.assertIsInstance(sink, LogSink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertFalse(unique_writer_identity) - self.assertIsNone(options) - - def test_sink_update_hit(self): - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - response = LogSink( - name=self.SINK_NAME, - destination=self.DESTINATION_URI, - filter=self.FILTER, - writer_identity=Test_SinksAPI.SINK_WRITER_IDENTITY, - ) - gax_api = _GAXSinksAPI(_update_sink_response=response) - api = self._make_one(gax_api, None) - - returned = api.sink_update( - self.PROJECT, - self.SINK_NAME, - self.FILTER, - self.DESTINATION_URI, - unique_writer_identity=True) - - self.assertEqual(returned, { - 'name': self.SINK_NAME, - 'filter': self.FILTER, - 'destination': self.DESTINATION_URI, - 'writerIdentity': self.SINK_WRITER_IDENTITY, - }) - - sink_name, sink, unique_writer_identity, options = ( - gax_api._update_sink_called_with) - self.assertEqual(sink_name, self.SINK_PATH) - self.assertIsInstance(sink, LogSink) - self.assertEqual(sink.name, self.SINK_PATH) - self.assertEqual(sink.filter, self.FILTER) - self.assertEqual(sink.destination, self.DESTINATION_URI) - self.assertTrue(unique_writer_identity) - self.assertIsNone(options) - - def test_sink_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSinksAPI(_random_gax_error=True) - api = 
self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.sink_delete(self.PROJECT, self.SINK_NAME) - - def test_sink_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSinksAPI(_sink_not_found=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.sink_delete(self.PROJECT, self.SINK_NAME) - - def test_sink_delete_hit(self): - gax_api = _GAXSinksAPI() - api = self._make_one(gax_api, None) - - api.sink_delete(self.PROJECT, self.SINK_NAME) - - sink_name, options = gax_api._delete_sink_called_with - self.assertEqual(sink_name, self.SINK_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_MetricsAPI(_Base, unittest.TestCase): - METRIC_NAME = 'metric_name' - METRIC_PATH = 'projects/%s/metrics/%s' % (_Base.PROJECT, METRIC_NAME) - DESCRIPTION = 'Description' - - @staticmethod - def _get_target_class(): - from google.cloud.logging._gax import _MetricsAPI - - return _MetricsAPI - - def test_ctor(self): - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - self.assertIs(api._gax_api, gax_api) - - def test_list_metrics_no_paging(self): - import six - from google.gax import INITIAL_PAGE - from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.metric import Metric - - TOKEN = 'TOKEN' - metric_pb = LogMetric(name=self.METRIC_PATH, - description=self.DESCRIPTION, - filter=self.FILTER) - response = _GAXPageIterator([metric_pb], page_token=TOKEN) - gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) - client = object() - api = self._make_one(gax_api, client) - - iterator = api.list_metrics(self.PROJECT) - page = six.next(iterator.pages) - metrics = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the metrics returned. 
- self.assertEqual(len(metrics), 1) - metric = metrics[0] - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_PATH) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIs(metric.client, client) - - project, page_size, options = gax_api._list_log_metrics_called_with - self.assertEqual(project, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertEqual(options.page_token, INITIAL_PAGE) - - def test_list_metrics_w_paging(self): - from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - from google.cloud._testing import _GAXPageIterator - from google.cloud.logging.metric import Metric - - TOKEN = 'TOKEN' - PAGE_SIZE = 42 - metric_pb = LogMetric(name=self.METRIC_PATH, - description=self.DESCRIPTION, - filter=self.FILTER) - response = _GAXPageIterator([metric_pb]) - gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) - client = object() - api = self._make_one(gax_api, client) - - iterator = api.list_metrics( - self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) - metrics = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the metrics returned. 
- self.assertEqual(len(metrics), 1) - metric = metrics[0] - self.assertIsInstance(metric, Metric) - self.assertEqual(metric.name, self.METRIC_PATH) - self.assertEqual(metric.filter_, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIs(metric.client, client) - - project, page_size, options = gax_api._list_log_metrics_called_with - self.assertEqual(project, self.PROJECT_PATH) - self.assertEqual(page_size, PAGE_SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_metric_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.metric_create( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) - - def test_metric_create_conflict(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXMetricsAPI(_create_log_metric_conflict=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(Conflict): - api.metric_create( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) - - def test_metric_create_ok(self): - from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - - api.metric_create( - self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) - - parent, metric, options = ( - gax_api._create_log_metric_called_with) - self.assertEqual(parent, self.PROJECT_PATH) - self.assertIsInstance(metric, LogMetric) - self.assertEqual(metric.name, self.METRIC_NAME) - self.assertEqual(metric.filter, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIsNone(options) - - def test_metric_get_error(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.metric_get(self.PROJECT, self.METRIC_NAME) - - def 
test_metric_get_miss(self): - from google.gax.errors import GaxError - - gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.metric_get(self.PROJECT, self.METRIC_NAME) - - def test_metric_get_hit(self): - from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - - RESPONSE = { - 'name': self.METRIC_PATH, - 'filter': self.FILTER, - 'description': self.DESCRIPTION, - } - metric_pb = LogMetric(name=self.METRIC_PATH, - description=self.DESCRIPTION, - filter=self.FILTER) - gax_api = _GAXMetricsAPI(_get_log_metric_response=metric_pb) - api = self._make_one(gax_api, None) - - response = api.metric_get(self.PROJECT, self.METRIC_NAME) - - self.assertEqual(response, RESPONSE) - - metric_name, options = gax_api._get_log_metric_called_with - self.assertEqual(metric_name, self.METRIC_PATH) - self.assertIsNone(options) - - def test_metric_update_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.metric_update( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) - - def test_metric_update_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.metric_update( - self.PROJECT, self.METRIC_NAME, self.FILTER, - self.DESCRIPTION) - - def test_metric_update_hit(self): - from google.cloud.proto.logging.v2.logging_metrics_pb2 import LogMetric - - response = LogMetric(name=self.METRIC_NAME, - description=self.DESCRIPTION, - filter=self.FILTER) - gax_api = _GAXMetricsAPI(_update_log_metric_response=response) - api = self._make_one(gax_api, None) - - api.metric_update( - self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) - - metric_name, metric, options = ( - gax_api._update_log_metric_called_with) - 
self.assertEqual(metric_name, self.METRIC_PATH) - self.assertIsInstance(metric, LogMetric) - self.assertEqual(metric.name, self.METRIC_PATH) - self.assertEqual(metric.filter, self.FILTER) - self.assertEqual(metric.description, self.DESCRIPTION) - self.assertIsNone(options) - - def test_metric_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXMetricsAPI(_random_gax_error=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(GaxError): - api.metric_delete(self.PROJECT, self.METRIC_NAME) - - def test_metric_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXMetricsAPI(_log_metric_not_found=True) - api = self._make_one(gax_api, None) - - with self.assertRaises(NotFound): - api.metric_delete(self.PROJECT, self.METRIC_NAME) - - def test_metric_delete_hit(self): - gax_api = _GAXMetricsAPI() - api = self._make_one(gax_api, None) - - api.metric_delete(self.PROJECT, self.METRIC_NAME) - - metric_name, options = gax_api._delete_log_metric_called_with - self.assertEqual(metric_name, self.METRIC_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test__parse_log_entry(unittest.TestCase): - - @staticmethod - def _call_fut(*args, **kwargs): - from google.cloud.logging._gax import _parse_log_entry - - return _parse_log_entry(*args, **kwargs) - - def test_simple(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - - entry_pb = LogEntry(log_name=u'lol-jk', text_payload=u'bah humbug') - result = self._call_fut(entry_pb) - expected = { - 'logName': entry_pb.log_name, - 'textPayload': entry_pb.text_payload, - } - self.assertEqual(result, expected) - - @mock.patch('google.cloud.logging._gax.MessageToDict', - side_effect=TypeError) - def test_non_registry_failure(self, msg_to_dict_mock): - entry_pb = mock.Mock(spec=['HasField']) - entry_pb.HasField.return_value = False - with self.assertRaises(TypeError): - self._call_fut(entry_pb) - - 
entry_pb.HasField.assert_called_once_with('proto_payload') - msg_to_dict_mock.assert_called_once_with(entry_pb) - - def test_unregistered_type(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - from google.protobuf.timestamp_pb2 import Timestamp - - pool = descriptor_pool.Default() - type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' - # Make sure the descriptor is not known in the registry. - with self.assertRaises(KeyError): - pool.FindMessageTypeByName(type_name) - - type_url = 'type.googleapis.com/' + type_name - metadata_bytes = ( - b'\n\n\n\x03foo\x12\x03bar\x12\x06\x08\xbd\xb6\xfb\xc6\x05') - any_pb = any_pb2.Any(type_url=type_url, value=metadata_bytes) - timestamp = Timestamp(seconds=61, nanos=1234000) - - entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) - result = self._call_fut(entry_pb) - self.assertEqual(len(result), 2) - self.assertEqual(result['timestamp'], '1970-01-01T00:01:01.001234Z') - # NOTE: This "hack" is needed on Windows, where the equality check - # for an ``Any`` instance fails on unregistered types. - self.assertEqual(result['protoPayload'].type_url, type_url) - self.assertEqual(result['protoPayload'].value, metadata_bytes) - - def test_registered_type(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - from google.protobuf.struct_pb2 import Struct - from google.protobuf.struct_pb2 import Value - - pool = descriptor_pool.Default() - type_name = 'google.protobuf.Struct' - # Make sure the descriptor is known in the registry. 
- descriptor = pool.FindMessageTypeByName(type_name) - self.assertEqual(descriptor.name, 'Struct') - - type_url = 'type.googleapis.com/' + type_name - field_name = 'foo' - field_value = u'Bar' - struct_pb = Struct( - fields={field_name: Value(string_value=field_value)}) - any_pb = any_pb2.Any( - type_url=type_url, - value=struct_pb.SerializeToString(), - ) - - entry_pb = LogEntry(proto_payload=any_pb, log_name=u'all-good') - result = self._call_fut(entry_pb) - expected_proto = { - 'logName': entry_pb.log_name, - 'protoPayload': { - '@type': type_url, - 'value': {field_name: field_value}, - }, - } - self.assertEqual(result, expected_proto) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test__log_entry_mapping_to_pb(unittest.TestCase): - - @staticmethod - def _call_fut(*args, **kwargs): - from google.cloud.logging._gax import _log_entry_mapping_to_pb - - return _log_entry_mapping_to_pb(*args, **kwargs) - - def test_simple(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - - result = self._call_fut({}) - self.assertEqual(result, LogEntry()) - - def test_unregistered_type(self): - from google.protobuf import descriptor_pool - from google.protobuf.json_format import ParseError - - pool = descriptor_pool.Default() - type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' - # Make sure the descriptor is not known in the registry. 
- with self.assertRaises(KeyError): - pool.FindMessageTypeByName(type_name) - - type_url = 'type.googleapis.com/' + type_name - json_mapping = { - 'protoPayload': { - '@type': type_url, - 'originalRequest': { - 'name': 'foo', - 'location': 'bar', - }, - 'requestTime': { - 'seconds': 1491000125, - }, - }, - } - with self.assertRaises(ParseError): - self._call_fut(json_mapping) - - def test_registered_type(self): - from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry - from google.protobuf import any_pb2 - from google.protobuf import descriptor_pool - - pool = descriptor_pool.Default() - type_name = 'google.protobuf.Struct' - # Make sure the descriptor is known in the registry. - descriptor = pool.FindMessageTypeByName(type_name) - self.assertEqual(descriptor.name, 'Struct') - - type_url = 'type.googleapis.com/' + type_name - field_name = 'foo' - field_value = u'Bar' - json_mapping = { - 'logName': u'hi-everybody', - 'protoPayload': { - '@type': type_url, - 'value': {field_name: field_value}, - }, - } - # Convert to a valid LogEntry. 
- result = self._call_fut(json_mapping) - entry_pb = LogEntry( - log_name=json_mapping['logName'], - proto_payload=any_pb2.Any( - type_url=type_url, - value=b'\n\014\n\003foo\022\005\032\003Bar', - ), - ) - self.assertEqual(result, entry_pb) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_logging_api(unittest.TestCase): - - def _call_fut(self, client): - from google.cloud.logging._gax import make_gax_logging_api - - return make_gax_logging_api(client) - - def test_it(self): - from google.cloud.logging import __version__ - from google.cloud.logging._gax import _LoggingAPI - from google.cloud.logging._gax import DEFAULT_USER_AGENT - - creds = object() - client = mock.Mock(_credentials=creds, spec=['_credentials']) - channels = [] - channel_args = [] - generated_api_kwargs = [] - channel_obj = object() - generated = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - def generated_api(channel=None, **kwargs): - channels.append(channel) - generated_api_kwargs.append(kwargs) - return generated - - host = 'foo.apis.invalid' - generated_api.SERVICE_ADDRESS = host - - patch = mock.patch.multiple( - 'google.cloud.logging._gax', - LoggingServiceV2Client=generated_api, - make_secure_channel=make_channel) - with patch: - logging_api = self._call_fut(client) - - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - self.assertEqual(len(generated_api_kwargs), 1) - self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(generated_api_kwargs[0]['lib_version'], __version__) - - self.assertIsInstance(logging_api, _LoggingAPI) - self.assertIs(logging_api._gax_api, generated) - self.assertIs(logging_api._client, client) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_metrics_api(unittest.TestCase): - - def _call_fut(self, client): - from google.cloud.logging._gax import make_gax_metrics_api - - return 
make_gax_metrics_api(client) - - def test_it(self): - from google.cloud.logging import __version__ - from google.cloud.logging._gax import _MetricsAPI - from google.cloud.logging._gax import DEFAULT_USER_AGENT - - creds = object() - client = mock.Mock(_credentials=creds, spec=['_credentials']) - channels = [] - channel_args = [] - generated_api_kwargs = [] - channel_obj = object() - generated = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - def generated_api(channel=None, **kwargs): - channels.append(channel) - generated_api_kwargs.append(kwargs) - return generated - - host = 'foo.apis.invalid' - generated_api.SERVICE_ADDRESS = host - - patch = mock.patch.multiple( - 'google.cloud.logging._gax', - MetricsServiceV2Client=generated_api, - make_secure_channel=make_channel) - with patch: - metrics_api = self._call_fut(client) - - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - self.assertEqual(len(generated_api_kwargs), 1) - self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(generated_api_kwargs[0]['lib_version'], __version__) - - self.assertIsInstance(metrics_api, _MetricsAPI) - self.assertIs(metrics_api._gax_api, generated) - self.assertIs(metrics_api._client, client) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_sinks_api(unittest.TestCase): - - def _call_fut(self, client): - from google.cloud.logging._gax import make_gax_sinks_api - - return make_gax_sinks_api(client) - - def test_it(self): - from google.cloud.logging import __version__ - from google.cloud.logging._gax import _SinksAPI - from google.cloud.logging._gax import DEFAULT_USER_AGENT - - creds = object() - client = mock.Mock(_credentials=creds, spec=['_credentials']) - channels = [] - channel_args = [] - generated_api_kwargs = [] - channel_obj = object() - generated = object() - - def make_channel(*args): - channel_args.append(args) - 
return channel_obj - - def generated_api(channel=None, **kwargs): - channels.append(channel) - generated_api_kwargs.append(kwargs) - return generated - - host = 'foo.apis.invalid' - generated_api.SERVICE_ADDRESS = host - - patch = mock.patch.multiple( - 'google.cloud.logging._gax', - ConfigServiceV2Client=generated_api, - make_secure_channel=make_channel) - with patch: - sinks_api = self._call_fut(client) - - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - self.assertEqual(len(generated_api_kwargs), 1) - self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(generated_api_kwargs[0]['lib_version'], __version__) - - self.assertIsInstance(sinks_api, _SinksAPI) - self.assertIs(sinks_api._gax_api, generated) - self.assertIs(sinks_api._client, client) - - -class _GAXLoggingAPI(_GAXBaseAPI): - - _delete_not_found = False - - def list_log_entries( - self, resource_names, project_ids, filter_, - order_by, page_size, options): - self._list_log_entries_called_with = ( - resource_names, project_ids, filter_, - order_by, page_size, options) - return self._list_log_entries_response - - def write_log_entries(self, entries, log_name, resource, labels, - partial_success, options): - self._write_log_entries_called_with = ( - entries, log_name, resource, labels, partial_success, options) - - def delete_log(self, log_name, options): - from google.gax.errors import GaxError - - self._delete_log_called_with = log_name, options - if self._random_gax_error: - raise GaxError('error') - if self._delete_not_found: - raise GaxError('notfound', self._make_grpc_not_found()) - - -class _GAXSinksAPI(_GAXBaseAPI): - - _create_sink_conflict = False - _sink_not_found = False - - def list_sinks(self, parent, page_size, options): - self._list_sinks_called_with = parent, page_size, options - return self._list_sinks_response - - def create_sink(self, parent, sink, unique_writer_identity, options): - from 
google.gax.errors import GaxError - from google.cloud.proto.logging.v2.logging_config_pb2 import LogSink - - self._create_sink_called_with = ( - parent, sink, unique_writer_identity, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_sink_conflict: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - return self._create_sink_response - - def get_sink(self, sink_name, options): - from google.gax.errors import GaxError - - self._get_sink_called_with = sink_name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_sink_response - except AttributeError: - raise GaxError('notfound', self._make_grpc_not_found()) - - def update_sink(self, sink_name, sink, unique_writer_identity, options): - from google.gax.errors import GaxError - - self._update_sink_called_with = ( - sink_name, sink, unique_writer_identity, options) - if self._random_gax_error: - raise GaxError('error') - try: - return self._update_sink_response - except AttributeError: - raise GaxError('notfound', self._make_grpc_not_found()) - - def delete_sink(self, sink_name, options=None): - from google.gax.errors import GaxError - - self._delete_sink_called_with = sink_name, options - if self._random_gax_error: - raise GaxError('error') - if self._sink_not_found: - raise GaxError('notfound', self._make_grpc_not_found()) - - -class _GAXMetricsAPI(_GAXBaseAPI): - - _create_log_metric_conflict = False - _log_metric_not_found = False - - def list_log_metrics(self, parent, page_size, options): - self._list_log_metrics_called_with = parent, page_size, options - return self._list_log_metrics_response - - def create_log_metric(self, parent, metric, options): - from google.gax.errors import GaxError - - self._create_log_metric_called_with = parent, metric, options - if self._random_gax_error: - raise GaxError('error') - if self._create_log_metric_conflict: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - - def 
get_log_metric(self, metric_name, options): - from google.gax.errors import GaxError - - self._get_log_metric_called_with = metric_name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_log_metric_response - except AttributeError: - raise GaxError('notfound', self._make_grpc_not_found()) - - def update_log_metric(self, metric_name, metric, options=None): - from google.gax.errors import GaxError - - self._update_log_metric_called_with = metric_name, metric, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._update_log_metric_response - except AttributeError: - raise GaxError('notfound', self._make_grpc_not_found()) - - def delete_log_metric(self, metric_name, options=None): - from google.gax.errors import GaxError - - self._delete_log_metric_called_with = metric_name, options - if self._random_gax_error: - raise GaxError('error') - if self._log_metric_not_found: - raise GaxError('notfound', self._make_grpc_not_found()) diff --git a/logging/tests/unit/test_client.py b/logging/tests/unit/test_client.py index 312f933cad6f..77c885eda18e 100644 --- a/logging/tests/unit/test_client.py +++ b/logging/tests/unit/test_client.py @@ -48,7 +48,7 @@ def test_ctor(self): client = self._make_one(project=self.PROJECT, credentials=creds) self.assertEqual(client.project, self.PROJECT) - def test_logging_api_wo_gax(self): + def test_logging_api_wo_gapi(self): from google.cloud.logging._http import _LoggingAPI client = self._make_one(self.PROJECT, @@ -64,7 +64,7 @@ def test_logging_api_wo_gax(self): again = client.logging_api self.assertIs(again, api) - def test_logging_api_w_gax(self): + def test_logging_api_w_gapic(self): clients = [] api_obj = object() @@ -76,10 +76,9 @@ def make_api(client_obj): client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch( - 'google.cloud.logging.client.make_gax_logging_api', - new=make_api) - with patch: + patch = 
mock.patch('google.cloud.logging.client._gapic') + with patch as gapic_module: + gapic_module.make_logging_api.side_effect = make_api api = client.logging_api self.assertIs(api, api_obj) @@ -88,7 +87,7 @@ def make_api(client_obj): again = client.logging_api self.assertIs(again, api) - def test_no_gax_ctor(self): + def test_no_gapic_ctor(self): from google.cloud.logging._http import _LoggingAPI creds = _make_credentials() @@ -102,7 +101,7 @@ def test_no_gax_ctor(self): api = client.logging_api self.assertIsInstance(api, _LoggingAPI) - def test_sinks_api_wo_gax(self): + def test_sinks_api_wo_gapic(self): from google.cloud.logging._http import _SinksAPI client = self._make_one( @@ -118,7 +117,7 @@ def test_sinks_api_wo_gax(self): again = client.sinks_api self.assertIs(again, api) - def test_sinks_api_w_gax(self): + def test_sinks_api_w_gapic(self): clients = [] api_obj = object() @@ -130,10 +129,9 @@ def make_api(client_obj): client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch( - 'google.cloud.logging.client.make_gax_sinks_api', - new=make_api) - with patch: + patch = mock.patch('google.cloud.logging.client._gapic') + with patch as gapic_module: + gapic_module.make_sinks_api.side_effect = make_api api = client.sinks_api self.assertIs(api, api_obj) @@ -142,7 +140,7 @@ def make_api(client_obj): again = client.sinks_api self.assertIs(again, api) - def test_metrics_api_wo_gax(self): + def test_metrics_api_wo_gapic(self): from google.cloud.logging._http import _MetricsAPI client = self._make_one( @@ -158,7 +156,7 @@ def test_metrics_api_wo_gax(self): again = client.metrics_api self.assertIs(again, api) - def test_metrics_api_w_gax(self): + def test_metrics_api_w_gapic(self): clients = [] api_obj = object() @@ -170,10 +168,9 @@ def make_api(client_obj): client = self._make_one(project=self.PROJECT, credentials=creds, _use_grpc=True) - patch = mock.patch( - 'google.cloud.logging.client.make_gax_metrics_api', - new=make_api) - 
with patch: + patch = mock.patch('google.cloud.logging.client._gapic') + with patch as gapic_module: + gapic_module.make_metrics_api.side_effect = make_api api = client.metrics_api self.assertIs(api, api_obj) From f8f3fbcf93134f9a8a18bf814e5973f761b7ab50 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 18 Jan 2018 12:12:16 -0800 Subject: [PATCH 3/5] Fix typo --- logging/tests/unit/test_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/logging/tests/unit/test_client.py b/logging/tests/unit/test_client.py index 77c885eda18e..9636e8ff6954 100644 --- a/logging/tests/unit/test_client.py +++ b/logging/tests/unit/test_client.py @@ -48,7 +48,7 @@ def test_ctor(self): client = self._make_one(project=self.PROJECT, credentials=creds) self.assertEqual(client.project, self.PROJECT) - def test_logging_api_wo_gapi(self): + def test_logging_api_wo_gapic(self): from google.cloud.logging._http import _LoggingAPI client = self._make_one(self.PROJECT, From 81eef2132a90e158a981e12583903eb516ba7c50 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 19 Jan 2018 13:50:37 -0800 Subject: [PATCH 4/5] Add missing encoding --- logging/google/cloud/logging_v2/proto/logging_config_pb2.py | 1 + 1 file changed, 1 insertion(+) diff --git a/logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/logging/google/cloud/logging_v2/proto/logging_config_pb2.py index 5bcaeba1e9d1..9fa39a546e85 100644 --- a/logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/logging_v2/proto/logging_config.proto From 24fd8e5191412870fe8dbb0d6d7781e3d2740563 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 19 Jan 2018 14:04:19 -0800 Subject: [PATCH 5/5] Remove unused test helper --- logging/tests/unit/test__gapic.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/logging/tests/unit/test__gapic.py b/logging/tests/unit/test__gapic.py index ffa7e7e6d7f0..c578d6d852bb 100644 --- a/logging/tests/unit/test__gapic.py +++ b/logging/tests/unit/test__gapic.py @@ -35,11 +35,6 @@ FILTER = 'logName:syslog AND severity>=ERROR' -def make_credentials(): - return mock.create_autospec( - google.auth.credentials.Credentials, instance=True) - - class Test_LoggingAPI(object): LOG_NAME = 'log_name' LOG_PATH = 'projects/%s/logs/%s' % (PROJECT, LOG_NAME)